From 30c756d3e72e14b9e089f5916de3de41f7df5201 Mon Sep 17 00:00:00 2001 From: Daylin Morgan Date: Tue, 2 Jan 2024 14:44:38 -0600 Subject: [PATCH] feat: vendor packaging + add support for requires python check This is a mostly small change albeit with addition of more embedded code from pypa/packaging to simplify supporting a requires-python check with PEP723 style syntax. --- .gitignore | 2 + .pre-commit-config.yaml | 2 +- examples/stopwatch.py | 0 noxfile.py | 2 +- pdm.lock | 92 +-- pyproject.toml | 5 +- scripts/vendor-tomli.py | 87 --- scripts/vendor.py | 221 ++++++ src/viv/viv.py | 1475 +++++++++++++++++++++++++++++++-------- tests/test_vendored.py | 17 + 10 files changed, 1472 insertions(+), 431 deletions(-) mode change 100755 => 100644 examples/stopwatch.py delete mode 100644 scripts/vendor-tomli.py create mode 100755 scripts/vendor.py create mode 100644 tests/test_vendored.py diff --git a/.gitignore b/.gitignore index 25af996..35959d2 100644 --- a/.gitignore +++ b/.gitignore @@ -176,3 +176,5 @@ docs/viv.py docs/svgs docs/public /tests/.viv-cache +/scripts/tomli +/scripts/packaging diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 06ddf52..76678aa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,6 +4,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.1.10 hooks: + - id: ruff-format - id: ruff args: [ --fix ] - - id: ruff-format diff --git a/examples/stopwatch.py b/examples/stopwatch.py old mode 100755 new mode 100644 diff --git a/noxfile.py b/noxfile.py index 7b3093e..5b7a81d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -58,4 +58,4 @@ def release(session): @nox.session(python=["3.8", "3.9", "3.10", "3.11"]) def test(session): pdm_install(session, "test") - session.run("pytest") + session.run("pytest", "tests/") diff --git a/pdm.lock b/pdm.lock index 83c5ae2..5e9821f 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "docs", "test"] strategy = ["cross_platform"] lock_version = "4.4.1" -content_hash = "sha256:c9e5da8cc42f30380075c09b37765d5687a14daa61ee3323af51ee1aa73f57a5" +content_hash = "sha256:67b99f093f433bb1a56dc393d51c615b65e94ea9ad397a5460c5a2a29a21cb22" [[package]] name = "alabaster" @@ -17,6 +17,16 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "astor" +version = "0.8.1" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +summary = "Read/rewrite/write Python ASTs" +files = [ + {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, + {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, +] + [[package]] name = "babel" version = "2.14.0" @@ -216,15 +226,15 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "7.0.1" requires_python = ">=3.8" summary = "Read metadata from Python packages" dependencies = [ "zipp>=0.5", ] files = [ - {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = 
"sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [[package]] @@ -360,7 +370,7 @@ files = [ [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" requires_python = ">=3.8" summary = "Optional static typing for Python" dependencies = [ @@ -369,33 +379,33 @@ dependencies = [ "typing-extensions>=4.1.0", ] files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = 
"mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [[package]] @@ -507,7 +517,7 @@ files = [ [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" requires_python = ">=3.7" summary = "pytest: simple powerful testing with Python" dependencies = [ @@ -519,8 +529,8 @@ dependencies = [ "tomli>=1.0.0; python_version < \"3.11\"", ] files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [[package]] @@ -624,25 +634,25 @@ files = [ [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [[package]] name = "shibuya" -version = "2023.10.26" +version = "2024.1.2" requires_python = ">=3.7" summary = "A clean, responsive, and customizable Sphinx documentation theme with light/dark mode." 
dependencies = [ "Sphinx", ] files = [ - {file = "shibuya-2023.10.26-py3-none-any.whl", hash = "sha256:e8a8647005a410d929fe4fdd3af2b88c156efbca8582ce3803ebd7771d08cebb"}, - {file = "shibuya-2023.10.26.tar.gz", hash = "sha256:878f866a8720776c878ff1d9e17f7731cf8923859ff6fec77999b469477764a1"}, + {file = "shibuya-2024.1.2-py3-none-any.whl", hash = "sha256:27533d5ed93881c3de3eec9c9f3004fa2fb3120c73869ffc2c719fda65cc78ee"}, + {file = "shibuya-2024.1.2.tar.gz", hash = "sha256:12937ad82c660db660a4ce196bd126630c9bd90d96a8593203eefd2ad255957e"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 88e23a9..29ec599 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,8 +24,9 @@ version = { source = "scm" } [tool.pdm.dev-dependencies] dev = [ - "pre-commit>=3", - "mypy>=0.991", + "pre-commit>=3", + "mypy>=0.991", + "astor>=0.8.1", ] docs = [ "sphinx", diff --git a/scripts/vendor-tomli.py b/scripts/vendor-tomli.py deleted file mode 100644 index 8318fb6..0000000 --- a/scripts/vendor-tomli.py +++ /dev/null @@ -1,87 +0,0 @@ -import re -from pathlib import Path - -FILES = ( - ("types", [[7, 11]]), - ("re", [[14, 107]]), - ("parser", [[20, 691]]), -) - -TOMLI_DELIM = ("##### START VENDORED TOMLI #####", "##### END VENDORED TOMLI #####") - -TOMLI_PREFACE = """ -# MODIFIED FROM https://github.com/hukkin/tomli -# see below for original license -# SPDX-License-Identifier: MIT -# SPDX-FileCopyrightText: 2021 Taneli Hukkinen -# Licensed to PSF under a Contributor Agreement. -""" - -VENDORED_IMPORTS = """ -import string # noqa -from collections.abc import Iterable # noqa -from functools import lru_cache # noqa -from datetime import date, datetime, time, timedelta, timezone, tzinfo # noqa -from types import MappingProxyType # noqa -from typing import IO, Any, Callable, NamedTuple # noqa -""" - -# REMOVE FOR ACTUAL VENDORED VERSION -tomli_text = VENDORED_IMPORTS -for f, slices in FILES: - text = Path(f"./tomli/src/tomli/_{f}.py").read_text() - for indices in slices: - tomli_text = "\n".join( - ( - tomli_text, - # black can add back spaces if it wants - *[ - line - for line in text.splitlines()[slice(*indices)] - if line.strip("\r\n") - ], - ) - ) - -IDENT_PATTERN = r"^(?P[A-Z_]*) =" -FUNC_PATTERN = r"^def (?P[a-zA-Z_]+)\(" - -idents = re.findall(IDENT_PATTERN, tomli_text, re.MULTILINE) -funcs = re.findall(FUNC_PATTERN, tomli_text, re.MULTILINE) - - -# TODO: USE ONE LOOP? -for pat in idents + funcs: - tomli_text = re.sub(f"(? 
str: + return f"#### START VENDORED {self.name.upper()} ####" + + @property + def end_delim(self) -> str: + return f"#### END VENDORED {self.name.upper()} ####" + + def generate_vendored_source(self): + self.src_text = "" + for f, slices in self.files: + og_text = (self.basepath / f"{f}.py").read_text() + for indices in slices: + self.src_text = "\n".join( + ( + self.src_text, + *[ + line + for line in og_text.splitlines()[slice(*indices)] + if line.strip("\r\n") + ], + ) + ) + + def replace_identifiers(self): + patterns = set.union( + *[ + set(re.findall(regex, self.src_text, re.MULTILINE)) + for regex in ( + r"^class (?P[a-zA-Z_]*)(?:\(.*\))?:", + r"^(?P[a-zA-Z_]*) =", + r"^def (?P[a-zA-Z_]+)\(", + ) + ] + ) - { + "Key", + } # prevent KeyError false positive by leaving Key alone + + for pat in patterns: + self.src_text = re.sub( + r'(?P[\s("\[={])' + pat, + f"\gv_{self.name}_{pat}", + self.src_text, + ) + + def insert(self, base_text: str) -> str: + start, rest = re.split(self.start_delim, base_text) + _, rest = re.split(self.end_delim, base_text) + src = textwrap.indent( + remove_docs_and_comments(self.src_text.strip()), + prefix=" " * (4 if self.indent else 0), + ) + return "\n".join( + ( + start.strip(), + "\n", + self.start_delim, + self.prefix + self.imports + src + self.suffix, + self.end_delim, + "\n", + rest.strip(), + ) + ) + + +PACKAGES = [ + Package( + name="packaging", + url="https://github.com/pypa/packaging.git", + rev="23.2", + files=( + ("_structures", [[5, 61]]), + ("version", [[17, 563]]), + ("utils", [[54, 100]]), + ("specifiers", [[28, 1030]]), + ), + basepath=Path(__file__).parent / "packaging/src/packaging", + prefix=""" +# MODIFIED FROM https://github.com/pypa/packaging +# see repo for original licenses +# This software is made available under the terms of *either* of the licenses +# found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +# under the terms of *both* these licenses. +""", + imports=""" +import abc # noqa +import itertools # noqa +import re # noqa +from typing import ( # noqa + Any, + Callable, + Iterable, + Iterator, + List, + NamedTuple, + Optional, + Set, + SupportsInt, + Tuple, + TypeVar, + Union, +) +""", + suffix=""" +Version = v_packaging_Version +SpecifierSet = v_packaging_SpecifierSet +""", + ), + Package( + name="tomli", + url="https://github.com/hukkin/tomli.git", + rev="2.0.1", + files=( + ("_types", [[7, 11]]), + ("_re", [[14, 107]]), + ("_parser", [[20, 691]]), + ), + prefix=""" +try: + from tomllib import loads as toml_loads +except ImportError: + # MODIFIED FROM https://github.com/hukkin/tomli + # see below for original license + # SPDX-License-Identifier: MIT + # SPDX-FileCopyrightText: 2021 Taneli Hukkinen + # Licensed to PSF under a Contributor Agreement. 
+""", + imports=""" + import string # noqa + from collections.abc import Iterable # noqa + from functools import lru_cache # noqa + from datetime import date, datetime, time, timedelta, timezone, tzinfo # noqa + from io import BinaryIO + from types import MappingProxyType # noqa + from typing import IO, Any, Callable, NamedTuple # noqa +""", + basepath=Path(__file__).parent / "tomli/src/tomli", + suffix=""" + toml_loads = v_tomli_loads +""", + indent=True, + ), +] + + +def main(): + viv_source_path = Path(__file__).parent.parent / "src/viv/viv.py" + viv_source = viv_source_path.read_text() + + for pkg in PACKAGES: + viv_source = pkg.insert(viv_source) + + viv_source_path.write_text(viv_source) + + +if __name__ == "__main__": + main() diff --git a/src/viv/viv.py b/src/viv/viv.py index f4b2679..15cea97 100755 --- a/src/viv/viv.py +++ b/src/viv/viv.py @@ -56,7 +56,8 @@ from typing import ( __version__ = "2023.1003-pep723" -##### START VENDORED TOMLI ##### +#### START VENDORED TOMLI #### + try: from tomllib import loads as toml_loads except ImportError: @@ -65,23 +66,23 @@ except ImportError: # SPDX-License-Identifier: MIT # SPDX-FileCopyrightText: 2021 Taneli Hukkinen # Licensed to PSF under a Contributor Agreement. + import string # noqa from collections.abc import Iterable # noqa from functools import lru_cache # noqa from datetime import date, datetime, time, timedelta, timezone, tzinfo # noqa + from io import BinaryIO from types import MappingProxyType # noqa from typing import IO, Any, Callable, NamedTuple # noqa - ParseFloat = Callable[[str], Any] + v_tomli_ParseFloat = Callable[[str], Any] Key = Tuple[str, ...] - Pos = int - # - 00:32:00.999999 - # - 00:32:00 - __tomli___TIME_RE_STR = ( - r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" + v_tomli_Pos = int + v_tomli__TIME_RE_STR = ( + "([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\\.([0-9]{1,6})[0-9]*)?" ) - __tomli__RE_NUMBER = re.compile( - r""" + v_tomli_RE_NUMBER = re.compile( + """ 0 (?: x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex @@ -93,31 +94,26 @@ except ImportError: | [+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part (?P - (?:\.[0-9](?:_?[0-9])*)? # optional fractional part + (?:\\.[0-9](?:_?[0-9])*)? # optional fractional part (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part ) """, flags=re.VERBOSE, ) - __tomli__RE_LOCALTIME = re.compile(__tomli___TIME_RE_STR) - __tomli__RE_DATETIME = re.compile( - rf""" + v_tomli_RE_LOCALTIME = re.compile(v_tomli__TIME_RE_STR) + v_tomli_RE_DATETIME = re.compile( + f""" ([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27 (?: [Tt ] - {__tomli___TIME_RE_STR} + {v_tomli__TIME_RE_STR} (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset )? """, flags=re.VERBOSE, ) - def __tomli__match_to_datetime(match: re.Match) -> datetime | date: - """Convert a `__tomli__RE_DATETIME` match to `datetime.datetime` - or `datetime.date`. - Raises ValueError if the match does not correspond to a valid date - or datetime. 
- """ + def v_tomli_match_to_datetime(match: re.Match) -> datetime | date: ( year_str, month_str, @@ -137,69 +133,65 @@ except ImportError: hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) micros = int(micros_str.ljust(6, "0")) if micros_str else 0 if offset_sign_str: - tz: tzinfo | None = __tomli__cached_tz( + tz: tzinfo | None = v_tomli_cached_tz( offset_hour_str, offset_minute_str, offset_sign_str ) elif zulu_time: tz = timezone.utc - else: # local date-time + else: tz = None return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) @lru_cache(maxsize=None) - def __tomli__cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + def v_tomli_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: sign = 1 if sign_str == "+" else -1 return timezone( - timedelta( - hours=sign * int(hour_str), - minutes=sign * int(minute_str), - ) + timedelta(hours=sign * int(hour_str), minutes=sign * int(minute_str)) ) - def __tomli__match_to_localtime(match: re.Match) -> time: + def v_tomli_match_to_localtime(match: re.Match) -> time: hour_str, minute_str, sec_str, micros_str = match.groups() micros = int(micros_str.ljust(6, "0")) if micros_str else 0 return time(int(hour_str), int(minute_str), int(sec_str), micros) - def __tomli__match_to_number(match: re.Match, parse_float: ParseFloat) -> Any: + def v_tomli_match_to_number( + match: re.Match, parse_float: v_tomli_ParseFloat + ) -> Any: if match.group("floatpart"): return parse_float(match.group()) return int(match.group(), 0) - __tomli__ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) - # Neither of these sets include quotation mark or backslash. They are - # currently handled as separate cases in the parser functions. - __tomli__ILLEGAL_BASIC_STR_CHARS = __tomli__ASCII_CTRL - frozenset("\t") - __tomli__ILLEGAL_MULTILINE_BASIC_STR_CHARS = __tomli__ASCII_CTRL - frozenset("\t\n") - __tomli__ILLEGAL_LITERAL_STR_CHARS = __tomli__ILLEGAL_BASIC_STR_CHARS - __tomli__ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ( - __tomli__ILLEGAL_MULTILINE_BASIC_STR_CHARS + v_tomli_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + v_tomli_ILLEGAL_BASIC_STR_CHARS = v_tomli_ASCII_CTRL - frozenset("\t") + v_tomli_ILLEGAL_MULTILINE_BASIC_STR_CHARS = v_tomli_ASCII_CTRL - frozenset("\t\n") + v_tomli_ILLEGAL_LITERAL_STR_CHARS = v_tomli_ILLEGAL_BASIC_STR_CHARS + v_tomli_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ( + v_tomli_ILLEGAL_MULTILINE_BASIC_STR_CHARS ) - __tomli__ILLEGAL_COMMENT_CHARS = __tomli__ILLEGAL_BASIC_STR_CHARS - __tomli__TOML_WS = frozenset(" \t") - __tomli__TOML_WS_AND_NEWLINE = __tomli__TOML_WS | frozenset("\n") - __tomli__BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") - __tomli__KEY_INITIAL_CHARS = __tomli__BARE_KEY_CHARS | frozenset("\"'") - __tomli__HEXDIGIT_CHARS = frozenset(string.hexdigits) - __tomli__BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + v_tomli_ILLEGAL_COMMENT_CHARS = v_tomli_ILLEGAL_BASIC_STR_CHARS + v_tomli_TOML_WS = frozenset(" \t") + v_tomli_TOML_WS_AND_NEWLINE = v_tomli_TOML_WS | frozenset("\n") + v_tomli_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") + v_tomli_KEY_INITIAL_CHARS = v_tomli_BARE_KEY_CHARS | frozenset("\"'") + v_tomli_HEXDIGIT_CHARS = frozenset(string.hexdigits) + v_tomli_BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( { - "\\b": "\u0008", # backspace - "\\t": "\u0009", # tab - "\\n": "\u000A", # linefeed - "\\f": "\u000C", # form feed - "\\r": "\u000D", # carriage return - '\\"': 
"\u0022", # quote - "\\\\": "\u005C", # backslash + "\\b": "\x08", + "\\t": "\t", + "\\n": "\n", + "\\f": "\x0c", + "\\r": "\r", + '\\"': '"', + "\\\\": "\\", } ) - class TOMLDecodeError(ValueError): - """An error raised if a document is not valid TOML.""" + class v_tomli_TOMLDecodeError(ValueError): + pass - def __tomli__load( - __fp: IO[bytes], *, parse_float: ParseFloat = float + def v_tomli_load( + __fp: BinaryIO, *, parse_float: v_tomli_ParseFloat = float ) -> dict[str, Any]: - """Parse TOML from a binary file object.""" b = __fp.read() try: s = b.decode() @@ -207,32 +199,18 @@ except ImportError: raise TypeError( "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`" ) from None - return __tomli__loads(s, parse_float=parse_float) + return v_tomli_loads(s, parse_float=parse_float) - def __tomli__loads( - __s: str, *, parse_float: ParseFloat = float - ) -> dict[str, Any]: # noqa: C901 - """Parse TOML from a string.""" - # The spec allows converting "\r\n" to "\n", even in string - # literals. Let's do so to simplify parsing. + def v_tomli_loads( + __s: str, *, parse_float: v_tomli_ParseFloat = float + ) -> dict[str, Any]: src = __s.replace("\r\n", "\n") pos = 0 - out = Output(NestedDict(), Flags()) + out = v_tomli_Output(v_tomli_NestedDict(), v_tomli_Flags()) header: Key = () - parse_float = __tomli__make_safe_parse_float(parse_float) - # Parse one statement at a time - # (typically means one line in TOML source) + parse_float = v_tomli_make_safe_parse_float(parse_float) while True: - # 1. Skip line leading whitespace - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) - # 2. Parse rules. Expect one of the following: - # - end of file - # - end of line - # - comment - # - key/value pair - # - append dict to list (and move to its namespace) - # - create dict (and move to its namespace) - # Skip trailing whitespace when applicable. + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) try: char = src[pos] except IndexError: @@ -240,9 +218,9 @@ except ImportError: if char == "\n": pos += 1 continue - if char in __tomli__KEY_INITIAL_CHARS: - pos = __tomli__key_value_rule(src, pos, out, header, parse_float) - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + if char in v_tomli_KEY_INITIAL_CHARS: + pos = v_tomli_key_value_rule(src, pos, out, header, parse_float) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) elif char == "[": try: second_char: str | None = src[pos + 1] @@ -250,33 +228,26 @@ except ImportError: second_char = None out.flags.finalize_pending() if second_char == "[": - pos, header = __tomli__create_list_rule(src, pos, out) + pos, header = v_tomli_create_list_rule(src, pos, out) else: - pos, header = __tomli__create_dict_rule(src, pos, out) - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + pos, header = v_tomli_create_dict_rule(src, pos, out) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) elif char != "#": - raise __tomli__suffixed_err(src, pos, "Invalid statement") - # 3. Skip comment - pos = __tomli__skip_comment(src, pos) - # 4. Expect end of line or end of file + raise v_tomli_suffixed_err(src, pos, "Invalid statement") + pos = v_tomli_skip_comment(src, pos) try: char = src[pos] except IndexError: break if char != "\n": - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, "Expected newline or end of document after a statement" ) pos += 1 return out.data.dict - class Flags: - """Flags that map to parsed keys/namespaces.""" - - # Marks an immutable namespace (inline array or inline table). 
+ class v_tomli_Flags: FROZEN = 0 - # Marks a nest that has been explicitly created and can no longer - # be opened using the "[table]" syntax. EXPLICIT_NEST = 1 def __init__(self) -> None: @@ -299,7 +270,7 @@ except ImportError: cont = cont[k]["nested"] cont.pop(key[-1], None) - def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + def set(self, key: Key, flag: int, *, recursive: bool) -> None: cont = self._flags key_parent, key_stem = key[:-1], key[-1] for k in key_parent: @@ -316,7 +287,7 @@ except ImportError: def is_(self, key: Key, flag: int) -> bool: if not key: - return False # document root has no flags + return False cont = self._flags for k in key[:-1]: if k not in cont: @@ -331,17 +302,11 @@ except ImportError: return flag in cont["flags"] or flag in cont["recursive_flags"] return False - class NestedDict: + class v_tomli_NestedDict: def __init__(self) -> None: - # The parsed content of the TOML document self.dict: dict[str, Any] = {} - def get_or_create_nest( - self, - key: Key, - *, - access_lists: bool = True, - ) -> dict: + def get_or_create_nest(self, key: Key, *, access_lists: bool = True) -> dict: cont: Any = self.dict for k in key: if k not in cont: @@ -364,11 +329,13 @@ except ImportError: else: cont[last_key] = [{}] - class Output(NamedTuple): - data: NestedDict - flags: Flags + class v_tomli_Output(NamedTuple): + data: v_tomli_NestedDict + flags: v_tomli_Flags - def __tomli__skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + def v_tomli_skip_chars( + src: str, pos: v_tomli_Pos, chars: Iterable[str] + ) -> v_tomli_Pos: try: while src[pos] in chars: pos += 1 @@ -376,145 +343,149 @@ except ImportError: pass return pos - def __tomli__skip_until( + def v_tomli_skip_until( src: str, - pos: Pos, + pos: v_tomli_Pos, expect: str, *, error_on: frozenset[str], error_on_eof: bool, - ) -> Pos: + ) -> v_tomli_Pos: try: new_pos = src.index(expect, pos) except ValueError: new_pos = len(src) if error_on_eof: - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, new_pos, f"Expected {expect!r}" ) from None if not error_on.isdisjoint(src[pos:new_pos]): while src[pos] not in error_on: pos += 1 - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, f"Found invalid character {src[pos]!r}" ) return new_pos - def __tomli__skip_comment(src: str, pos: Pos) -> Pos: + def v_tomli_skip_comment(src: str, pos: v_tomli_Pos) -> v_tomli_Pos: try: char: str | None = src[pos] except IndexError: char = None if char == "#": - return __tomli__skip_until( + return v_tomli_skip_until( src, pos + 1, "\n", - error_on=__tomli__ILLEGAL_COMMENT_CHARS, + error_on=v_tomli_ILLEGAL_COMMENT_CHARS, error_on_eof=False, ) return pos - def __tomli__skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + def v_tomli_skip_comments_and_array_ws(src: str, pos: v_tomli_Pos) -> v_tomli_Pos: while True: pos_before_skip = pos - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS_AND_NEWLINE) - pos = __tomli__skip_comment(src, pos) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS_AND_NEWLINE) + pos = v_tomli_skip_comment(src, pos) if pos == pos_before_skip: return pos - def __tomli__create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: - pos += 1 # Skip "[" - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) - pos, key = __tomli__parse_key(src, pos) - if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): - raise __tomli__suffixed_err(src, pos, f"Cannot declare {key} twice") - out.flags.set(key, Flags.EXPLICIT_NEST, 
recursive=False) + def v_tomli_create_dict_rule( + src: str, pos: v_tomli_Pos, out: v_tomli_Output + ) -> tuple[v_tomli_Pos, Key]: + pos += 1 + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) + pos, key = v_tomli_parse_key(src, pos) + if out.flags.is_(key, v_tomli_Flags.EXPLICIT_NEST) or out.flags.is_( + key, v_tomli_Flags.FROZEN + ): + raise v_tomli_suffixed_err(src, pos, f"Cannot declare {key} twice") + out.flags.set(key, v_tomli_Flags.EXPLICIT_NEST, recursive=False) try: out.data.get_or_create_nest(key) except KeyError: - raise __tomli__suffixed_err(src, pos, "Cannot overwrite a value") from None + raise v_tomli_suffixed_err(src, pos, "Cannot overwrite a value") from None if not src.startswith("]", pos): - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, "Expected ']' at the end of a table declaration" ) return pos + 1, key - def __tomli__create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: - pos += 2 # Skip "[[" - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) - pos, key = __tomli__parse_key(src, pos) - if out.flags.is_(key, Flags.FROZEN): - raise __tomli__suffixed_err( + def v_tomli_create_list_rule( + src: str, pos: v_tomli_Pos, out: v_tomli_Output + ) -> tuple[v_tomli_Pos, Key]: + pos += 2 + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) + pos, key = v_tomli_parse_key(src, pos) + if out.flags.is_(key, v_tomli_Flags.FROZEN): + raise v_tomli_suffixed_err( src, pos, f"Cannot mutate immutable namespace {key}" ) - # Free the namespace now that it points to another empty list item... out.flags.unset_all(key) - # ...but this key precisely is still prohibited from table declaration - out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + out.flags.set(key, v_tomli_Flags.EXPLICIT_NEST, recursive=False) try: out.data.append_nest_to_list(key) except KeyError: - raise __tomli__suffixed_err(src, pos, "Cannot overwrite a value") from None + raise v_tomli_suffixed_err(src, pos, "Cannot overwrite a value") from None if not src.startswith("]]", pos): - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, "Expected ']]' at the end of an array declaration" ) return pos + 2, key - def __tomli__key_value_rule( - src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat - ) -> Pos: - pos, key, value = __tomli__parse_key_value_pair(src, pos, parse_float) + def v_tomli_key_value_rule( + src: str, + pos: v_tomli_Pos, + out: v_tomli_Output, + header: Key, + parse_float: v_tomli_ParseFloat, + ) -> v_tomli_Pos: + pos, key, value = v_tomli_parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] abs_key_parent = header + key_parent relative_path_cont_keys = (header + key[:i] for i in range(1, len(key))) for cont_key in relative_path_cont_keys: - # Check that dotted key syntax does not redefine an existing table - if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): - raise __tomli__suffixed_err( + if out.flags.is_(cont_key, v_tomli_Flags.EXPLICIT_NEST): + raise v_tomli_suffixed_err( src, pos, f"Cannot redefine namespace {cont_key}" ) - # Containers in the relative path can't be opened with the table syntax or - # dotted key/value syntax in following table sections. 
- out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) - if out.flags.is_(abs_key_parent, Flags.FROZEN): - raise __tomli__suffixed_err( + out.flags.add_pending(cont_key, v_tomli_Flags.EXPLICIT_NEST) + if out.flags.is_(abs_key_parent, v_tomli_Flags.FROZEN): + raise v_tomli_suffixed_err( src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" ) try: nest = out.data.get_or_create_nest(abs_key_parent) except KeyError: - raise __tomli__suffixed_err(src, pos, "Cannot overwrite a value") from None + raise v_tomli_suffixed_err(src, pos, "Cannot overwrite a value") from None if key_stem in nest: - raise __tomli__suffixed_err(src, pos, "Cannot overwrite a value") - # Mark inline table and array namespaces recursively immutable + raise v_tomli_suffixed_err(src, pos, "Cannot overwrite a value") if isinstance(value, (dict, list)): - out.flags.set(header + key, Flags.FROZEN, recursive=True) + out.flags.set(header + key, v_tomli_Flags.FROZEN, recursive=True) nest[key_stem] = value return pos - def __tomli__parse_key_value_pair( - src: str, pos: Pos, parse_float: ParseFloat - ) -> tuple[Pos, Key, Any]: - pos, key = __tomli__parse_key(src, pos) + def v_tomli_parse_key_value_pair( + src: str, pos: v_tomli_Pos, parse_float: v_tomli_ParseFloat + ) -> tuple[v_tomli_Pos, Key, Any]: + pos, key = v_tomli_parse_key(src, pos) try: char: str | None = src[pos] except IndexError: char = None if char != "=": - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, "Expected '=' after a key in a key/value pair" ) pos += 1 - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) - pos, value = __tomli__parse_value(src, pos, parse_float) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) + pos, value = v_tomli_parse_value(src, pos, parse_float) return pos, key, value - def __tomli__parse_key(src: str, pos: Pos) -> tuple[Pos, Key]: - pos, key_part = __tomli__parse_key_part(src, pos) + def v_tomli_parse_key(src: str, pos: v_tomli_Pos) -> tuple[v_tomli_Pos, Key]: + pos, key_part = v_tomli_parse_key_part(src, pos) key: Key = (key_part,) - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) while True: try: char: str | None = src[pos] @@ -523,197 +494,191 @@ except ImportError: if char != ".": return pos, key pos += 1 - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) - pos, key_part = __tomli__parse_key_part(src, pos) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) + pos, key_part = v_tomli_parse_key_part(src, pos) key += (key_part,) - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) - def __tomli__parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: + def v_tomli_parse_key_part(src: str, pos: v_tomli_Pos) -> tuple[v_tomli_Pos, str]: try: char: str | None = src[pos] except IndexError: char = None - if char in __tomli__BARE_KEY_CHARS: + if char in v_tomli_BARE_KEY_CHARS: start_pos = pos - pos = __tomli__skip_chars(src, pos, __tomli__BARE_KEY_CHARS) + pos = v_tomli_skip_chars(src, pos, v_tomli_BARE_KEY_CHARS) return pos, src[start_pos:pos] if char == "'": - return __tomli__parse_literal_str(src, pos) + return v_tomli_parse_literal_str(src, pos) if char == '"': - return __tomli__parse_one_line_basic_str(src, pos) - raise __tomli__suffixed_err( - src, pos, "Invalid initial character for a key part" - ) + return v_tomli_parse_one_line_basic_str(src, pos) + raise v_tomli_suffixed_err(src, pos, "Invalid initial character for a key part") - def __tomli__parse_one_line_basic_str(src: 
str, pos: Pos) -> tuple[Pos, str]: + def v_tomli_parse_one_line_basic_str( + src: str, pos: v_tomli_Pos + ) -> tuple[v_tomli_Pos, str]: pos += 1 - return __tomli__parse_basic_str(src, pos, multiline=False) + return v_tomli_parse_basic_str(src, pos, multiline=False) - def __tomli__parse_array( - src: str, pos: Pos, parse_float: ParseFloat - ) -> tuple[Pos, list]: + def v_tomli_parse_array( + src: str, pos: v_tomli_Pos, parse_float: v_tomli_ParseFloat + ) -> tuple[v_tomli_Pos, list]: pos += 1 array: list = [] - pos = __tomli__skip_comments_and_array_ws(src, pos) + pos = v_tomli_skip_comments_and_array_ws(src, pos) if src.startswith("]", pos): return pos + 1, array while True: - pos, val = __tomli__parse_value(src, pos, parse_float) + pos, val = v_tomli_parse_value(src, pos, parse_float) array.append(val) - pos = __tomli__skip_comments_and_array_ws(src, pos) + pos = v_tomli_skip_comments_and_array_ws(src, pos) c = src[pos : pos + 1] if c == "]": return pos + 1, array if c != ",": - raise __tomli__suffixed_err(src, pos, "Unclosed array") + raise v_tomli_suffixed_err(src, pos, "Unclosed array") pos += 1 - pos = __tomli__skip_comments_and_array_ws(src, pos) + pos = v_tomli_skip_comments_and_array_ws(src, pos) if src.startswith("]", pos): return pos + 1, array - def __tomli__parse_inline_table( - src: str, pos: Pos, parse_float: ParseFloat - ) -> tuple[Pos, dict]: + def v_tomli_parse_inline_table( + src: str, pos: v_tomli_Pos, parse_float: v_tomli_ParseFloat + ) -> tuple[v_tomli_Pos, dict]: pos += 1 - nested_dict = NestedDict() - flags = Flags() - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + nested_dict = v_tomli_NestedDict() + flags = v_tomli_Flags() + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) if src.startswith("}", pos): return pos + 1, nested_dict.dict while True: - pos, key, value = __tomli__parse_key_value_pair(src, pos, parse_float) + pos, key, value = v_tomli_parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] - if flags.is_(key, Flags.FROZEN): - raise __tomli__suffixed_err( + if flags.is_(key, v_tomli_Flags.FROZEN): + raise v_tomli_suffixed_err( src, pos, f"Cannot mutate immutable namespace {key}" ) try: nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) except KeyError: - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, "Cannot overwrite a value" ) from None if key_stem in nest: - raise __tomli__suffixed_err( + raise v_tomli_suffixed_err( src, pos, f"Duplicate inline table key {key_stem!r}" ) nest[key_stem] = value - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) c = src[pos : pos + 1] if c == "}": return pos + 1, nested_dict.dict if c != ",": - raise __tomli__suffixed_err(src, pos, "Unclosed inline table") + raise v_tomli_suffixed_err(src, pos, "Unclosed inline table") if isinstance(value, (dict, list)): - flags.set(key, Flags.FROZEN, recursive=True) + flags.set(key, v_tomli_Flags.FROZEN, recursive=True) pos += 1 - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) - def __tomli__parse_basic_str_escape( - src: str, pos: Pos, *, multiline: bool = False - ) -> tuple[Pos, str]: + def v_tomli_parse_basic_str_escape( + src: str, pos: v_tomli_Pos, *, multiline: bool = False + ) -> tuple[v_tomli_Pos, str]: escape_id = src[pos : pos + 2] pos += 2 if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: - # Skip whitespace until next non-whitespace character or end of - # the doc. 
Error if non-whitespace is found before newline. if escape_id != "\\\n": - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS) try: char = src[pos] except IndexError: return pos, "" if char != "\n": - raise __tomli__suffixed_err(src, pos, "Unescaped '\\' in a string") + raise v_tomli_suffixed_err(src, pos, "Unescaped '\\' in a string") pos += 1 - pos = __tomli__skip_chars(src, pos, __tomli__TOML_WS_AND_NEWLINE) + pos = v_tomli_skip_chars(src, pos, v_tomli_TOML_WS_AND_NEWLINE) return pos, "" if escape_id == "\\u": - return __tomli__parse_hex_char(src, pos, 4) + return v_tomli_parse_hex_char(src, pos, 4) if escape_id == "\\U": - return __tomli__parse_hex_char(src, pos, 8) + return v_tomli_parse_hex_char(src, pos, 8) try: - return pos, __tomli__BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + return pos, v_tomli_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] except KeyError: - raise __tomli__suffixed_err( - src, pos, "Unescaped '\\' in a string" - ) from None + raise v_tomli_suffixed_err(src, pos, "Unescaped '\\' in a string") from None - def __tomli__parse_basic_str_escape_multiline( - src: str, pos: Pos - ) -> tuple[Pos, str]: - return __tomli__parse_basic_str_escape(src, pos, multiline=True) + def v_tomli_parse_basic_str_escape_multiline( + src: str, pos: v_tomli_Pos + ) -> tuple[v_tomli_Pos, str]: + return v_tomli_parse_basic_str_escape(src, pos, multiline=True) - def __tomli__parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: + def v_tomli_parse_hex_char( + src: str, pos: v_tomli_Pos, hex_len: int + ) -> tuple[v_tomli_Pos, str]: hex_str = src[pos : pos + hex_len] - if len(hex_str) != hex_len or not __tomli__HEXDIGIT_CHARS.issuperset(hex_str): - raise __tomli__suffixed_err(src, pos, "Invalid hex value") + if len(hex_str) != hex_len or not v_tomli_HEXDIGIT_CHARS.issuperset(hex_str): + raise v_tomli_suffixed_err(src, pos, "Invalid hex value") pos += hex_len hex_int = int(hex_str, 16) - if not __tomli__is_unicode_scalar_value(hex_int): - raise __tomli__suffixed_err( + if not v_tomli_is_unicode_scalar_value(hex_int): + raise v_tomli_suffixed_err( src, pos, "Escaped character is not a Unicode scalar value" ) return pos, chr(hex_int) - def __tomli__parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]: - pos += 1 # Skip starting apostrophe + def v_tomli_parse_literal_str( + src: str, pos: v_tomli_Pos + ) -> tuple[v_tomli_Pos, str]: + pos += 1 start_pos = pos - pos = __tomli__skip_until( - src, - pos, - "'", - error_on=__tomli__ILLEGAL_LITERAL_STR_CHARS, - error_on_eof=True, + pos = v_tomli_skip_until( + src, pos, "'", error_on=v_tomli_ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True ) - return pos + 1, src[start_pos:pos] # Skip ending apostrophe + return pos + 1, src[start_pos:pos] - def __tomli__parse_multiline_str( - src: str, pos: Pos, *, literal: bool - ) -> tuple[Pos, str]: + def v_tomli_parse_multiline_str( + src: str, pos: v_tomli_Pos, *, literal: bool + ) -> tuple[v_tomli_Pos, str]: pos += 3 if src.startswith("\n", pos): pos += 1 if literal: delim = "'" - end_pos = __tomli__skip_until( + end_pos = v_tomli_skip_until( src, pos, "'''", - error_on=__tomli__ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on=v_tomli_ILLEGAL_MULTILINE_LITERAL_STR_CHARS, error_on_eof=True, ) result = src[pos:end_pos] pos = end_pos + 3 else: delim = '"' - pos, result = __tomli__parse_basic_str(src, pos, multiline=True) - # Add at maximum two extra apostrophes/quotes if the end sequence - # is 4 or 5 chars long instead of just 3. 
+ pos, result = v_tomli_parse_basic_str(src, pos, multiline=True) if not src.startswith(delim, pos): return pos, result pos += 1 if not src.startswith(delim, pos): return pos, result + delim pos += 1 - return pos, result + (delim * 2) + return pos, result + delim * 2 - def __tomli__parse_basic_str( - src: str, pos: Pos, *, multiline: bool - ) -> tuple[Pos, str]: + def v_tomli_parse_basic_str( + src: str, pos: v_tomli_Pos, *, multiline: bool + ) -> tuple[v_tomli_Pos, str]: if multiline: - error_on = __tomli__ILLEGAL_MULTILINE_BASIC_STR_CHARS - parse_escapes = __tomli__parse_basic_str_escape_multiline + error_on = v_tomli_ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = v_tomli_parse_basic_str_escape_multiline else: - error_on = __tomli__ILLEGAL_BASIC_STR_CHARS - parse_escapes = __tomli__parse_basic_str_escape + error_on = v_tomli_ILLEGAL_BASIC_STR_CHARS + parse_escapes = v_tomli_parse_basic_str_escape result = "" start_pos = pos while True: try: char = src[pos] except IndexError: - raise __tomli__suffixed_err(src, pos, "Unterminated string") from None + raise v_tomli_suffixed_err(src, pos, "Unterminated string") from None if char == '"': if not multiline: return pos + 1, result + src[start_pos:pos] @@ -728,73 +693,61 @@ except ImportError: start_pos = pos continue if char in error_on: - raise __tomli__suffixed_err(src, pos, f"Illegal character {char!r}") + raise v_tomli_suffixed_err(src, pos, f"Illegal character {char!r}") pos += 1 - def __tomli__parse_value( # noqa: C901 - src: str, pos: Pos, parse_float: ParseFloat - ) -> tuple[Pos, Any]: + def v_tomli_parse_value( + src: str, pos: v_tomli_Pos, parse_float: v_tomli_ParseFloat + ) -> tuple[v_tomli_Pos, Any]: try: char: str | None = src[pos] except IndexError: char = None - # IMPORTANT: order conditions based on speed of checking and likelihood - # Basic strings if char == '"': if src.startswith('"""', pos): - return __tomli__parse_multiline_str(src, pos, literal=False) - return __tomli__parse_one_line_basic_str(src, pos) - # Literal strings + return v_tomli_parse_multiline_str(src, pos, literal=False) + return v_tomli_parse_one_line_basic_str(src, pos) if char == "'": if src.startswith("'''", pos): - return __tomli__parse_multiline_str(src, pos, literal=True) - return __tomli__parse_literal_str(src, pos) - # Booleans + return v_tomli_parse_multiline_str(src, pos, literal=True) + return v_tomli_parse_literal_str(src, pos) if char == "t": if src.startswith("true", pos): return pos + 4, True if char == "f": if src.startswith("false", pos): return pos + 5, False - # Arrays if char == "[": - return __tomli__parse_array(src, pos, parse_float) - # Inline tables + return v_tomli_parse_array(src, pos, parse_float) if char == "{": - return __tomli__parse_inline_table(src, pos, parse_float) - # Dates and times - datetime_match = __tomli__RE_DATETIME.match(src, pos) + return v_tomli_parse_inline_table(src, pos, parse_float) + datetime_match = v_tomli_RE_DATETIME.match(src, pos) if datetime_match: try: - datetime_obj = __tomli__match_to_datetime(datetime_match) + datetime_obj = v_tomli_match_to_datetime(datetime_match) except ValueError as e: - raise __tomli__suffixed_err(src, pos, "Invalid date or datetime") from e + raise v_tomli_suffixed_err(src, pos, "Invalid date or datetime") from e return datetime_match.end(), datetime_obj - localtime_match = __tomli__RE_LOCALTIME.match(src, pos) + localtime_match = v_tomli_RE_LOCALTIME.match(src, pos) if localtime_match: - return localtime_match.end(), __tomli__match_to_localtime(localtime_match) - # 
Integers and "normal" floats. - # The regex will greedily match any type starting with a decimal - # char, so needs to be located after handling of dates and times. - number_match = __tomli__RE_NUMBER.match(src, pos) + return localtime_match.end(), v_tomli_match_to_localtime(localtime_match) + number_match = v_tomli_RE_NUMBER.match(src, pos) if number_match: - return number_match.end(), __tomli__match_to_number( + return number_match.end(), v_tomli_match_to_number( number_match, parse_float ) - # Special floats first_three = src[pos : pos + 3] if first_three in {"inf", "nan"}: return pos + 3, parse_float(first_three) first_four = src[pos : pos + 4] if first_four in {"-inf", "+inf", "-nan", "+nan"}: return pos + 4, parse_float(first_four) - raise __tomli__suffixed_err(src, pos, "Invalid value") + raise v_tomli_suffixed_err(src, pos, "Invalid value") - def __tomli__suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: - """Return a `TOMLDecodeError` where error message is suffixed with - coordinates in source.""" - - def coord_repr(src: str, pos: Pos) -> str: + def v_tomli_suffixed_err( + src: str, pos: v_tomli_Pos, msg: str + ) -> v_tomli_TOMLDecodeError: + def coord_repr(src: str, pos: v_tomli_Pos) -> str: if pos >= len(src): return "end of document" line = src.count("\n", 0, pos) + 1 @@ -804,19 +757,14 @@ except ImportError: column = pos - src.rindex("\n", 0, pos) return f"line {line}, column {column}" - return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + return v_tomli_TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") - def __tomli__is_unicode_scalar_value(codepoint: int) -> bool: - return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) + def v_tomli_is_unicode_scalar_value(codepoint: int) -> bool: + return 0 <= codepoint <= 55295 or 57344 <= codepoint <= 1114111 - def __tomli__make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat: - """A decorator to make `parse_float` safe. - `parse_float` must not return dicts or lists, because these types - would be mixed with parsed TOML tables and arrays, thus confusing - the parser. The returned decorated callable raises `ValueError` - instead of returning illegal types. - """ - # The default `float` callable never returns illegal types. Optimize it. + def v_tomli_make_safe_parse_float( + parse_float: v_tomli_ParseFloat, + ) -> v_tomli_ParseFloat: if parse_float is float: return float @@ -828,9 +776,927 @@ except ImportError: return safe_parse_float - toml_loads = __tomli__loads + toml_loads = v_tomli_loads -##### END VENDORED TOMLI ##### +#### END VENDORED TOMLI #### + + +#### START VENDORED PACKAGING #### + +# MODIFIED FROM https://github.com/pypa/packaging +# see repo for original licenses +# This software is made available under the terms of *either* of the licenses +# found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +# under the terms of *both* these licenses. 
+ +import abc # noqa +import itertools # noqa +import re # noqa +from typing import ( # noqa + Any, + Callable, + Iterable, + Iterator, + List, + NamedTuple, + Optional, + Set, + SupportsInt, + Tuple, + TypeVar, + Union, +) + + +class v_packaging_InfinityType: + def __repr__(self) -> str: + return "v_packaging_Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "v_packaging_NegativeInfinityType": + return v_packaging_NegativeInfinity + + +v_packaging_Infinity = v_packaging_InfinityType() + + +class v_packaging_NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> v_packaging_InfinityType: + return v_packaging_Infinity + + +v_packaging_NegativeInfinity = v_packaging_NegativeInfinityType() +v_packaging_LocalType = Tuple[Union[int, str], ...] +v_packaging_CmpPrePostDevType = Union[ + v_packaging_InfinityType, v_packaging_NegativeInfinityType, Tuple[str, int] +] +v_packaging_CmpLocalType = Union[ + v_packaging_NegativeInfinityType, + Tuple[ + Union[ + Tuple[int, str], Tuple[v_packaging_NegativeInfinityType, Union[int, str]] + ], + ..., + ], +] +v_packaging_CmpKey = Tuple[ + int, + Tuple[int, ...], + v_packaging_CmpPrePostDevType, + v_packaging_CmpPrePostDevType, + v_packaging_CmpPrePostDevType, + v_packaging_CmpLocalType, +] +v_packaging_VersionComparisonMethod = Callable[ + [v_packaging_CmpKey, v_packaging_CmpKey], bool +] + + +class v_packaging__Version(NamedTuple): + epoch: int + release: Tuple[int, ...] + dev: Optional[Tuple[str, int]] + pre: Optional[Tuple[str, int]] + post: Optional[Tuple[str, int]] + local: Optional[v_packaging_LocalType] + + +def v_packaging_parse(version: str) -> "v_packaging_Version": + return v_packaging_Version(version) + + +class v_packaging_InvalidVersion(ValueError): + pass + + +class v_packaging__BaseVersion: + _key: Tuple[Any, ...] 
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    def __lt__(self, other: "v_packaging__BaseVersion") -> bool:
+        if not isinstance(other, v_packaging__BaseVersion):
+            return NotImplemented
+        return self._key < other._key
+
+    def __le__(self, other: "v_packaging__BaseVersion") -> bool:
+        if not isinstance(other, v_packaging__BaseVersion):
+            return NotImplemented
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, v_packaging__BaseVersion):
+            return NotImplemented
+        return self._key == other._key
+
+    def __ge__(self, other: "v_packaging__BaseVersion") -> bool:
+        if not isinstance(other, v_packaging__BaseVersion):
+            return NotImplemented
+        return self._key >= other._key
+
+    def __gt__(self, other: "v_packaging__BaseVersion") -> bool:
+        if not isinstance(other, v_packaging__BaseVersion):
+            return NotImplemented
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, v_packaging__BaseVersion):
+            return NotImplemented
+        return self._key != other._key
+
+
+v_packaging__VERSION_PATTERN = """
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\\.]?
+                (?P<post_l>post|rev|r)
+                [-_\\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\\.]?
+            (?P<dev_l>dev)
+            [-_\\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\\+(?P<local>[a-z0-9]+(?:[-_\\.][a-z0-9]+)*))?       # local version
+"""
+v_packaging_VERSION_PATTERN = v_packaging__VERSION_PATTERN
+
+
+class v_packaging_Version(v_packaging__BaseVersion):
+    _regex = re.compile(
+        "^\\s*" + v_packaging_VERSION_PATTERN + "\\s*$", re.VERBOSE | re.IGNORECASE
+    )
+    _key: v_packaging_CmpKey
+
+    def __init__(self, version: str) -> None:
+        match = self._regex.search(version)
+        if not match:
+            raise v_packaging_InvalidVersion(f"Invalid version: '{version}'")
+        self._version = v_packaging__Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=v_packaging__parse_letter_version(
+                match.group("pre_l"), match.group("pre_n")
+            ),
+            post=v_packaging__parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=v_packaging__parse_letter_version(
+                match.group("dev_l"), match.group("dev_n")
+            ),
+            local=v_packaging__parse_local_version(match.group("local")),
+        )
+        self._key = v_packaging__cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        return f"<v_packaging_Version('{self}')>"
+
+    def __str__(self) -> str:
+        parts = []
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+        parts.append(".".join(str(x) for x in self.release))
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        return self._version.epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        return self._version.release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        return self._version.pre
+
+    @property
+    def post(self) -> Optional[int]:
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        parts = []
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+        parts.append(".".join(str(x) for x in self.release))
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def v_packaging__parse_letter_version(
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
+) -> Optional[Tuple[str, int]]:
+    if letter:
+        if number is None:
+            number = 0
+        letter = letter.lower()
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+        return letter, int(number)
+    if not letter and number:
+        letter = "post"
+        return letter, int(number)
+    return None
+
+
+v_packaging__local_version_separators = re.compile("[\\._-]")
+
+
+def v_packaging__parse_local_version(
+    local: Optional[str],
+) -> Optional[v_packaging_LocalType]:
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in v_packaging__local_version_separators.split(local)
+        )
+    return None
+
+
+def v_packaging__cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[v_packaging_LocalType],
+) -> v_packaging_CmpKey:
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+    if pre is None and post is None and dev is not None:
+        _pre: v_packaging_CmpPrePostDevType = v_packaging_NegativeInfinity
+    elif pre is None:
+        _pre = v_packaging_Infinity
+    else:
+        _pre = pre
+    if post is None:
+        _post: v_packaging_CmpPrePostDevType = v_packaging_NegativeInfinity
+    else:
+        _post = post
+    if dev is None:
+        _dev: v_packaging_CmpPrePostDevType = v_packaging_Infinity
+    else:
+        _dev = dev
+    if local is None:
+        _local: v_packaging_CmpLocalType = v_packaging_NegativeInfinity
+    else:
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (v_packaging_NegativeInfinity, i)
+            for i in local
+        )
+    return epoch, _release, _pre, _post, _dev, _local
+
+
+def v_packaging_canonicalize_version(
+    version: Union[v_packaging_Version, str], *, strip_trailing_zero: bool = True
+) -> str:
+    if isinstance(version, str):
+        try:
+            v_packaging_parsed = v_packaging_Version(version)
+        except v_packaging_InvalidVersion:
+            return version
+    else:
+        v_packaging_parsed = version
+    parts = []
+    if v_packaging_parsed.epoch != 0:
+        parts.append(f"{v_packaging_parsed.epoch}!")
+    release_segment = ".".join(str(x) for x in v_packaging_parsed.release)
+    if strip_trailing_zero:
+        release_segment = re.sub("(\\.0)+$", "", release_segment)
+    parts.append(release_segment)
+    if v_packaging_parsed.pre is not None:
+        parts.append("".join(str(x) for x in v_packaging_parsed.pre))
+    if v_packaging_parsed.post is not None:
+        parts.append(f".post{v_packaging_parsed.post}")
+    if v_packaging_parsed.dev is not None:
+        parts.append(f".dev{v_packaging_parsed.dev}")
+    if v_packaging_parsed.local is not None:
+        parts.append(f"+{v_packaging_parsed.local}")
+    return "".join(parts)
+
+
+v_packaging_UnparsedVersion = Union[v_packaging_Version, str]
+v_packaging_UnparsedVersionVar = TypeVar(
+    "v_packaging_UnparsedVersionVar", bound=v_packaging_UnparsedVersion
+)
+v_packaging_CallableOperator = Callable[[v_packaging_Version, str], bool]
+
+
+def v_packaging__coerce_version(
+    version: v_packaging_UnparsedVersion,
+) -> v_packaging_Version:
+    if not isinstance(version, v_packaging_Version):
+        version = v_packaging_Version(version)
+    return version
+
+
+class v_packaging_InvalidSpecifier(ValueError):
+    pass
+
+
+class v_packaging_BaseSpecifier(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        pass
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        pass
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        pass
+
+    @property
+    @abc.abstractmethod
+    def prereleases(self) -> Optional[bool]:
+        pass
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        pass
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+        pass
+
+    @abc.abstractmethod
+    def filter(
+        self,
+        iterable: Iterable[v_packaging_UnparsedVersionVar],
+        prereleases: Optional[bool] = None,
+    ) -> Iterator[v_packaging_UnparsedVersionVar]:
+        pass
+
+
+class v_packaging_Specifier(v_packaging_BaseSpecifier):
+    _operator_regex_str = """
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = """
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be v_packaging_parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \\s*
+                [^\\s;)]*  # The arbitrary version can be just about anything,
+                          # we match everything except for whitespace, a
+                          # semi-colon for marker support, and a closing paren
+                          # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+                \\s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\\.[0-9]+)*   # release
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \\.\\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\\.]?(post|rev|r)[-_\\.]?[0-9]*)
+                    )?
+                    (?:[-_\\.]?dev[-_\\.]?[0-9]*)?         # dev release
+                    (?:\\+[a-z0-9]+(?:[-_\\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+                \\s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\\.]?(post|rev|r)[-_\\.]?[0-9]*)
+                )?
+                (?:[-_\\.]?dev[-_\\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+                \\s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\\.]?(post|rev|r)[-_\\.]?[0-9]*)
+                )?
+                (?:[-_\\.]?dev[-_\\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+    _regex = re.compile(
+        "^\\s*" + _operator_regex_str + _version_regex_str + "\\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        match = self._regex.search(spec)
+        if not match:
+            raise v_packaging_InvalidSpecifier(f"Invalid specifier: '{spec}'")
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> bool:
+        if self._prereleases is not None:
+            return self._prereleases
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+            if v_packaging_Version(version).is_prerelease:
+                return True
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        canonical_version = v_packaging_canonicalize_version(
+            self._spec[1], strip_trailing_zero=self._spec[0] != "~="
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, str):
+            try:
+                other = self.__class__(str(other))
+            except v_packaging_InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> v_packaging_CallableOperator:
+        operator_callable: v_packaging_CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _compare_compatible(self, prospective: v_packaging_Version, spec: str) -> bool:
+        prefix = ".".join(
+            list(
+                itertools.takewhile(
+                    v_packaging__is_not_suffix, v_packaging__version_split(spec)
+                )
+            )[:-1]
+        )
+        prefix += ".*"
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
+
+    def _compare_equal(self, prospective: v_packaging_Version, spec: str) -> bool:
+        if spec.endswith(".*"):
+            normalized_prospective = v_packaging_canonicalize_version(
+                prospective.public, strip_trailing_zero=False
+            )
+            normalized_spec = v_packaging_canonicalize_version(
+                spec[:-2], strip_trailing_zero=False
+            )
+            split_spec = v_packaging__version_split(normalized_spec)
+            split_prospective = v_packaging__version_split(normalized_prospective)
+            padded_prospective, _ = v_packaging__pad_version(
+                split_prospective, split_spec
+            )
+            shortened_prospective = padded_prospective[: len(split_spec)]
+            return shortened_prospective == split_spec
+        else:
+            spec_version = v_packaging_Version(spec)
+            if not spec_version.local:
+                prospective = v_packaging_Version(prospective.public)
+            return prospective == spec_version
+
+    def _compare_not_equal(self, prospective: v_packaging_Version, spec: str) -> bool:
+        return not self._compare_equal(prospective, spec)
+
+    def _compare_less_than_equal(
+        self, prospective: v_packaging_Version, spec: str
+    ) -> bool:
+        return v_packaging_Version(prospective.public) <= v_packaging_Version(spec)
+
+    def _compare_greater_than_equal(
+        self, prospective: v_packaging_Version, spec: str
+    ) -> bool:
+        return v_packaging_Version(prospective.public) >= v_packaging_Version(spec)
+
+    def _compare_less_than(
+        self, prospective: v_packaging_Version, spec_str: str
+    ) -> bool:
+        spec = v_packaging_Version(spec_str)
+        if not prospective < spec:
+            return False
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if v_packaging_Version(prospective.base_version) == v_packaging_Version(
+                spec.base_version
+            ):
+                return False
+        return True
+
+    def _compare_greater_than(
+        self, prospective: v_packaging_Version, spec_str: str
+    ) -> bool:
+        spec = v_packaging_Version(spec_str)
+        if not prospective > spec:
+            return False
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if v_packaging_Version(prospective.base_version) == v_packaging_Version(
+                spec.base_version
+            ):
+                return False
+        if prospective.local is not None:
+            if v_packaging_Version(prospective.base_version) == v_packaging_Version(
+                spec.base_version
+            ):
+                return False
+        return True
+
+    def _compare_arbitrary(self, prospective: v_packaging_Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: Union[str, v_packaging_Version]) -> bool:
+        return self.contains(item)
+
+    def contains(
+        self, item: v_packaging_UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+        if prereleases is None:
+            prereleases = self.prereleases
+        normalized_item = v_packaging__coerce_version(item)
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+        operator_callable: v_packaging_CallableOperator = self._get_operator(
+            self.operator
+        )
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self,
+        iterable: Iterable[v_packaging_UnparsedVersionVar],
+        prereleases: Optional[bool] = None,
+    ) -> Iterator[v_packaging_UnparsedVersionVar]:
+        yielded = False
+        found_prereleases = []
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+        for version in iterable:
+            v_packaging_parsed_version = v_packaging__coerce_version(version)
+            if self.contains(v_packaging_parsed_version, **kw):
+                if v_packaging_parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                else:
+                    yielded = True
+                    yield version
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+v_packaging__prefix_regex = re.compile("^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def v_packaging__version_split(version: str) -> List[str]:
+    result: List[str] = []
+    for item in version.split("."):
+        match = v_packaging__prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def v_packaging__is_not_suffix(segment: str) -> bool:
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def v_packaging__pad_version(
+    left: List[str], right: List[str]
+) -> Tuple[List[str], List[str]]:
+    left_split, right_split = [], []
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+    return list(itertools.chain(*left_split)), list(itertools.chain(*right_split))
+
+
+class v_packaging_SpecifierSet(v_packaging_BaseSpecifier):
+    def __init__(
+        self, specifiers: str = "", prereleases: Optional[bool] = None
+    ) -> None:
+        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+        v_packaging_parsed: Set[v_packaging_Specifier] = set()
+        for specifier in split_specifiers:
+            v_packaging_parsed.add(v_packaging_Specifier(specifier))
+        self._specs = frozenset(v_packaging_parsed)
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+        if self._prereleases is not None:
+            return self._prereleases
+        if not self._specs:
+            return None
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+        return f"<v_packaging_SpecifierSet({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(
+        self, other: Union["v_packaging_SpecifierSet", str]
+    ) -> "v_packaging_SpecifierSet":
+        if isinstance(other, str):
+            other = v_packaging_SpecifierSet(other)
+        elif not isinstance(other, v_packaging_SpecifierSet):
+            return NotImplemented
+        specifier = v_packaging_SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine v_packaging_SpecifierSets with True and False prerelease overrides."  # noqa
+            )
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, (str, v_packaging_Specifier)):
+            other = v_packaging_SpecifierSet(str(other))
+        elif not isinstance(other, v_packaging_SpecifierSet):
+            return NotImplemented
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[v_packaging_Specifier]:
+        return iter(self._specs)
+
+    def __contains__(self, item: v_packaging_UnparsedVersion) -> bool:
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: v_packaging_UnparsedVersion,
+        prereleases: Optional[bool] = None,
+        installed: Optional[bool] = None,
+    ) -> bool:
+        if not isinstance(item, v_packaging_Version):
+            item = v_packaging_Version(item)
+        if prereleases is None:
+            prereleases = self.prereleases
+        if not prereleases and item.is_prerelease:
+            return False
+        if installed and item.is_prerelease:
+            item = v_packaging_Version(item.base_version)
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self,
+        iterable: Iterable[v_packaging_UnparsedVersionVar],
+        prereleases: Optional[bool] = None,
+    ) -> Iterator[v_packaging_UnparsedVersionVar]:
+        if prereleases is None:
+            prereleases = self.prereleases
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        else:
+            filtered: List[v_packaging_UnparsedVersionVar] = []
+            found_prereleases: List[v_packaging_UnparsedVersionVar] = []
+            for item in iterable:
+                v_packaging_parsed_version = v_packaging__coerce_version(item)
+                if v_packaging_parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+            if not filtered and found_prereleases and prereleases is None:
+                return iter(found_prereleases)
+            return iter(filtered)
+
+
+Version = v_packaging_Version
+SpecifierSet = v_packaging_SpecifierSet
+
+#### END VENDORED PACKAGING ####
 
 
 class Spinner:
@@ -2027,6 +2893,15 @@ def _read_metadata_block(script: str) -> dict:
         return {}
 
 
+def _check_python(requires: str):
+    version = Version(platform.python_version())
+    if version not in SpecifierSet(requires):
+        err_quit(
+            f"Running python {a.yellow}{version}{a.end} does "
+            f"not satisfy 'requires-python: {requires}'"
+        )
+
+
 def _parse_date(txt: str) -> datetime:
     """attempt to parse datetime string
 
@@ -2532,9 +3407,11 @@ class Viv:
                 script_text = fetch_script(script)
 
             viv_used = uses_viv(script_text)
-            deps = _read_metadata_block(script_text).get("dependencies", [])
+            metadata = _read_metadata_block(script_text)
+            deps = metadata.get("dependencies", [])
 
-            # TODO: incorporate python version checking...
+            if requires := metadata.get("requires-python", ""):
+                _check_python(requires)
 
             if viv_used and deps:
                 error(
diff --git a/tests/test_vendored.py b/tests/test_vendored.py
new file mode 100644
index 0000000..736a2c5
--- /dev/null
+++ b/tests/test_vendored.py
@@ -0,0 +1,17 @@
+from viv import SpecifierSet, Version, toml_loads
+
+
+def test_packaging():
+    assert Version("3.6") in SpecifierSet(">=3.6")
+
+
+def test_tomli():
+    assert {"requires-python": ">3.6", "dependencies": ["rich", "typer"]} == toml_loads(
+        """
+    requires-python = ">3.6"
+    dependencies = [
+        "rich",
+        "typer"
+    ]
+    """
+    )
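
Note (illustration only, not part of the patch): a minimal sketch of the check that the new _check_python() performs against a PEP 723 style requires-python field, using the Version and SpecifierSet names that viv re-exports (as in tests/test_vendored.py above). The inline metadata block and the standalone check below are hypothetical examples, not viv's internal code path.

    import platform

    from viv import SpecifierSet, Version

    # A PEP 723 inline metadata block as it might appear at the top of a script:
    #
    # # /// script
    # # requires-python = ">=3.10"
    # # dependencies = ["rich"]
    # # ///

    requires = ">=3.10"  # hypothetical value read from the requires-python field
    current = Version(platform.python_version())  # e.g. Version("3.11.7")

    # Same membership test _check_python performs; prints instead of calling err_quit().
    if current not in SpecifierSet(requires):
        print(f"python {current} does not satisfy 'requires-python: {requires}'")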