From 13b65103fdf48e94140316196a5d10fe3f081db8 Mon Sep 17 00:00:00 2001 From: ducklet Date: Thu, 2 Feb 2023 00:00:24 +0100 Subject: [PATCH 01/25] [wip] use Python 3.11 & update all dependencies --- Dockerfile | 11 +- poetry.lock | 811 ++++++++++++++++++++++++----------------------- pyproject.toml | 21 +- unwind/config.py | 6 +- 4 files changed, 439 insertions(+), 410 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7522a11..cef1ba6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.10-alpine +FROM docker.io/library/python:3.11-alpine RUN apk update --no-cache \ && apk upgrade --no-cache \ @@ -11,13 +11,8 @@ WORKDIR /var/app COPY requirements.txt ./ -# Required to build greenlet on Alpine, dependency of SQLAlchemy 1.4. -RUN apk add --no-cache \ - --virtual .build-deps \ - g++ gcc musl-dev \ - && pip install --no-cache-dir --upgrade \ - --requirement requirements.txt \ - && apk del .build-deps +RUN pip install --no-cache-dir --upgrade \ + --requirement requirements.txt USER 10000:10001 diff --git a/poetry.lock b/poetry.lock index 9405979..a3c5a5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,13 +1,16 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. + [[package]] name = "aiosqlite" -version = "0.17.0" +version = "0.18.0" description = "asyncio bridge to the standard sqlite3 module" category = "main" optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing_extensions = ">=3.7.2" +python-versions = ">=3.7" +files = [ + {file = "aiosqlite-0.18.0-py3-none-any.whl", hash = "sha256:c3511b841e3a2c5614900ba1d179f366826857586f78abd75e7cbeb88e75a557"}, + {file = "aiosqlite-0.18.0.tar.gz", hash = "sha256:faa843ef5fb08bafe9a9b3859012d3d9d6f77ce3637899de20606b7fc39aa213"}, +] [[package]] name = "anyio" @@ -16,6 +19,10 @@ description = "High level compatibility layer for multiple asynchronous event lo category = "main" optional = false python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] [package.dependencies] idna = ">=2.8" @@ -28,25 +35,34 @@ trio = ["trio (>=0.16,<0.22)"] [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", 
"sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.11.2" description = "Screen-scraping library" category = "main" optional = false python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, + {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, +] [package.dependencies] soupsieve = ">1.2" @@ -57,18 +73,45 @@ lxml = ["lxml"] [[package]] name = "black" -version = "22.10.0" +version = "23.1.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = 
"black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, +] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -78,22 +121,113 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode_backport = ["unicodedata2"] +python-versions = "*" +files = [ + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = 
"charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, +] [[package]] name = "click" @@ -102,29 +236,41 @@ description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "databases" -version = "0.6.1" +version = "0.7.0" description = "Async database support for Python." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "databases-0.7.0-py3-none-any.whl", hash = "sha256:cf5da4b8a3e3cd038c459529725ebb64931cbbb7a091102664f20ef8f6cefd0d"}, + {file = "databases-0.7.0.tar.gz", hash = "sha256:ea2d419d3d2eb80595b7ceb8f282056f080af62efe2fb9bcd83562f93ec4b674"}, +] [package.dependencies] aiosqlite = {version = "*", optional = true, markers = "extra == \"sqlite\""} -sqlalchemy = ">=1.4,<1.5" +sqlalchemy = ">=1.4.42,<1.5" [package.extras] aiomysql = ["aiomysql"] @@ -138,14 +284,77 @@ sqlite = ["aiosqlite"] [[package]] name = "greenlet" -version = "1.1.2" +version = "2.0.2" description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + 
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = 
"greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] [[package]] name = "h11" @@ -154,6 +363,10 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] [[package]] name = "html5lib" @@ -162,6 +375,10 @@ description = "HTML parser based on the WHATWG HTML specification" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] [package.dependencies] six = ">=1.9" @@ -180,28 +397,40 @@ description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] name = "isort" -version = "5.10.1" +version = "5.12.0" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] -requirements_deprecated_finder = ["pip-api", "pipreqs"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "mypy-extensions" @@ -210,6 +439,10 @@ description = "Experimental type system extensions for programs checked with the category = "dev" optional = false python-versions = "*" +files = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] [[package]] name = "nodeenv" @@ -218,40 +451,53 @@ description = "Node.js virtual environment builder" category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] [package.dependencies] setuptools = "*" [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" +files = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] [[package]] name = "pathspec" -version = "0.10.1" +version = "0.11.0" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, + {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, +] [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.6.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, +] [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -260,37 +506,26 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pyright" -version = "1.1.276" +version = "1.1.292" description = "Command line wrapper for pyright" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.292-py3-none-any.whl", hash = "sha256:23d1f14b15afe38bb7a7117b9861ad0546aff078da312d294e60a727445c23ff"}, + {file = "pyright-1.1.292.tar.gz", hash = "sha256:035ea1af6fabfdcc80c0afb545f677bd377114157d69779cce2a642ff894e51c"}, +] [package.dependencies] nodeenv = ">=1.6.0" @@ -301,11 +536,15 @@ dev = ["twine (>=3.4.1)"] [[package]] name = "pytest" -version = "7.1.3" +version = "7.2.1" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -313,55 +552,66 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -tomli = ">=1.0.0" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.20.1" +version = "0.20.3" description = "Pytest support for asyncio" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.20.3.tar.gz", hash = "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, + {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = 
"sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, +] [package.dependencies] pytest = ">=6.1.0" [package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] name = "requests" -version = "2.28.1" +version = "2.28.2" description = "Python HTTP for Humans." category = "main" optional = false python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "65.5.0" +version = "67.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "setuptools-67.1.0-py3-none-any.whl", hash = "sha256:a7687c12b444eaac951ea87a9627c4f904ac757e7abdc5aac32833234af90378"}, + {file = "setuptools-67.1.0.tar.gz", hash = "sha256:e261cdf010c11a41cb5cb5f1bf3338a7433832029f559a6a7614bd42a967c300"}, +] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -371,6 +621,10 @@ description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "sniffio" @@ -379,6 +633,10 @@ description = "Sniff out which async library your code is running under" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] [[package]] name = "soupsieve" @@ -387,76 +645,103 @@ description = "A modern CSS selector implementation for Beautiful Soup." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, + {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, +] [[package]] -name = "SQLAlchemy" -version = "1.4.25" +name = "sqlalchemy" +version = "1.4.46" description = "Database Abstraction Library" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.46-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:7001f16a9a8e06488c3c7154827c48455d1c1507d7228d43e781afbc8ceccf6d"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7a46639ba058d320c9f53a81db38119a74b8a7a1884df44d09fbe807d028aaf"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27m-win32.whl", hash = "sha256:c04144a24103135ea0315d459431ac196fe96f55d3213bfd6d39d0247775c854"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27m-win_amd64.whl", hash = "sha256:7b81b1030c42b003fc10ddd17825571603117f848814a344d305262d370e7c34"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:939f9a018d2ad04036746e15d119c0428b1e557470361aa798e6e7d7f5875be0"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b7f4b6aa6e87991ec7ce0e769689a977776db6704947e562102431474799a857"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbf17ac9a61e7a3f1c7ca47237aac93cabd7f08ad92ac5b96d6f8dea4287fc1"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7f8267682eb41a0584cf66d8a697fef64b53281d01c93a503e1344197f2e01fe"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cb0ad8a190bc22d2112001cfecdec45baffdf41871de777239da6a28ed74b6"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-win32.whl", hash = "sha256:5f752676fc126edc1c4af0ec2e4d2adca48ddfae5de46bb40adbd3f903eb2120"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-win_amd64.whl", hash = "sha256:31de1e2c45e67a5ec1ecca6ec26aefc299dd5151e355eb5199cd9516b57340be"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d68e1762997bfebf9e5cf2a9fd0bcf9ca2fdd8136ce7b24bbd3bbfa4328f3e4a"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d112b0f3c1bc5ff70554a97344625ef621c1bfe02a73c5d97cac91f8cd7a41e"}, + {file = 
"SQLAlchemy-1.4.46-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69fac0a7054d86b997af12dc23f581cf0b25fb1c7d1fed43257dee3af32d3d6d"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-win32.whl", hash = "sha256:887865924c3d6e9a473dc82b70977395301533b3030d0f020c38fd9eba5419f2"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-win_amd64.whl", hash = "sha256:984ee13543a346324319a1fb72b698e521506f6f22dc37d7752a329e9cd00a32"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:9167d4227b56591a4cc5524f1b79ccd7ea994f36e4c648ab42ca995d28ebbb96"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d61e9ecc849d8d44d7f80894ecff4abe347136e9d926560b818f6243409f3c86"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3ec187acf85984263299a3f15c34a6c0671f83565d86d10f43ace49881a82718"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9883f5fae4fd8e3f875adc2add69f8b945625811689a6c65866a35ee9c0aea23"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-win32.whl", hash = "sha256:535377e9b10aff5a045e3d9ada8a62d02058b422c0504ebdcf07930599890eb0"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-win_amd64.whl", hash = "sha256:18cafdb27834fa03569d29f571df7115812a0e59fd6a3a03ccb0d33678ec8420"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:a1ad90c97029cc3ab4ffd57443a20fac21d2ec3c89532b084b073b3feb5abff3"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4847f4b1d822754e35707db913396a29d874ee77b9c3c3ef3f04d5a9a6209618"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a99282848b6cae0056b85da17392a26b2d39178394fc25700bcf967e06e97a"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4b1cc7835b39835c75cf7c20c926b42e97d074147c902a9ebb7cf2c840dc4e2"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-win32.whl", hash = "sha256:c522e496f9b9b70296a7675272ec21937ccfc15da664b74b9f58d98a641ce1b6"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-win_amd64.whl", hash = "sha256:ae067ab639fa499f67ded52f5bc8e084f045d10b5ac7bb928ae4ca2b6c0429a5"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e3c1808008124850115a3f7e793a975cfa5c8a26ceeeb9ff9cbb4485cac556df"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d164df3d83d204c69f840da30b292ac7dc54285096c6171245b8d7807185aa"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b33ffbdbbf5446cf36cd4cc530c9d9905d3c2fe56ed09e25c22c850cdb9fac92"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d94682732d1a0def5672471ba42a29ff5e21bb0aae0afa00bb10796fc1e28dd"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-win32.whl", hash = "sha256:f8cb80fe8d14307e4124f6fad64dfd87ab749c9d275f82b8b4ec84c84ecebdbe"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-win_amd64.whl", hash = "sha256:07e48cbcdda6b8bc7a59d6728bd3f5f574ffe03f2c9fb384239f3789c2d95c2e"}, + {file = 
"SQLAlchemy-1.4.46-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1b1e5e96e2789d89f023d080bee432e2fef64d95857969e70d3cadec80bd26f0"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3714e5b33226131ac0da60d18995a102a17dddd42368b7bdd206737297823ad"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:955162ad1a931fe416eded6bb144ba891ccbf9b2e49dc7ded39274dd9c5affc5"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6e4cb5c63f705c9d546a054c60d326cbde7421421e2d2565ce3e2eee4e1a01f"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-win32.whl", hash = "sha256:51e1ba2884c6a2b8e19109dc08c71c49530006c1084156ecadfaadf5f9b8b053"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-win_amd64.whl", hash = "sha256:315676344e3558f1f80d02535f410e80ea4e8fddba31ec78fe390eff5fb8f466"}, + {file = "SQLAlchemy-1.4.46.tar.gz", hash = "sha256:6913b8247d8a292ef8315162a51931e2b40ce91681f1b6f18f697045200c4a30"}, +] [package.dependencies] greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.0)", "greenlet (!=0.4.17)"] -mariadb_connector = ["mariadb (>=1.0.1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql_pymssql = ["pymssql"] -mssql_pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql_connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql_pg8000 = ["pg8000 (>=1.16.6)"] -postgresql_psycopg2binary = ["psycopg2-binary"] -postgresql_psycopg2cffi = ["psycopg2cffi"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "starlette" -version = "0.17.1" +version = "0.23.1" description = "The little ASGI library that shines." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "starlette-0.23.1-py3-none-any.whl", hash = "sha256:ec69736c90be8dbfc6ec6800ba6feb79c8c44f9b1706c0b2bb27f936bcf362cc"}, + {file = "starlette-0.23.1.tar.gz", hash = "sha256:8510e5b3d670326326c5c1d4cb657cc66832193fe5d5b7015a51c7b1e1b1bf42"}, +] [package.dependencies] -anyio = ">=3.0.0,<4" +anyio = ">=3.4.0,<5" [package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] [[package]] name = "ulid-py" @@ -465,14 +750,22 @@ description = "Universally Unique Lexicographically Sortable Identifier" category = "main" optional = false python-versions = "*" +files = [ + {file = "ulid-py-1.1.0.tar.gz", hash = "sha256:dc6884be91558df077c3011b9fb0c87d1097cb8fc6534b11f310161afd5738f0"}, + {file = "ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987"}, +] [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.14" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, +] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -481,18 +774,22 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" -version = "0.19.0" +version = "0.20.0" description = "The lightning-fast ASGI server." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.20.0-py3-none-any.whl", hash = "sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd"}, + {file = "uvicorn-0.20.0.tar.gz", hash = "sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8"}, +] [package.dependencies] click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "webencodings" @@ -501,274 +798,12 @@ description = "Character encoding aliases for legacy web content" category = "main" optional = false python-versions = "*" - -[metadata] -lock-version = "1.1" -python-versions = "^3.10" -content-hash = "d61472062b64d83922624a383db8d855764147f0e646b323f204d831778311a5" - -[metadata.files] -aiosqlite = [ - {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"}, - {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"}, -] -anyio = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - 
{file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] -databases = [ - {file = "databases-0.6.1-py3-none-any.whl", hash = "sha256:47fae85d82d8227049f08b154019913c3ad2f6057ceb0b5ebb36703be6f5666b"}, - {file = "databases-0.6.1.tar.gz", hash = "sha256:0a69c6983a27e10a5b75ffa094486f1febadd9d5a8db016e69b8c2f6a354dc30"}, -] -greenlet = [ - {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, - {file = 
"greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, - {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, - {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, - {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, - {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, - {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, - {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, - {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, - {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, - {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, - {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, - {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = 
"sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, - {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, - {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, - {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, - {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, - {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, - {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, - {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, - {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, - {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, - {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, - {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, - {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, - {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, -] -h11 = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] -html5lib = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -nodeenv = [ - {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, - {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = 
"sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyright = [ - {file = "pyright-1.1.276-py3-none-any.whl", hash = "sha256:d9388405ea20a55446cb7809b1746158bdf557f9162b476f5aed71173f4ffd2b"}, - {file = "pyright-1.1.276.tar.gz", hash = "sha256:debaa08f6975dd381b9408880e36bb781ba7a1a6cf24b7868e83be41b6c8cb75"}, -] -pytest = [ - {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, - {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, - {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sniffio = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -SQLAlchemy = [ - {file = "SQLAlchemy-1.4.25-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a36ea43919e51b0de0c0bc52bcfdad7683f6ea9fb81b340cdabb9df0e045e0f7"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:75cd5d48389a7635393ff5a9214b90695c06b3d74912109c3b00ce7392b69c6c"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27m-win32.whl", hash = "sha256:16ef07e102d2d4f974ba9b0d4ac46345a411ad20ad988b3654d59ff08e553b1c"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27m-win_amd64.whl", hash = "sha256:a79abdb404d9256afb8aeaa0d3a4bc7d3b6d8b66103d8b0f2f91febd3909976e"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:7ad59e2e16578b6c1a2873e4888134112365605b08a6067dd91e899e026efa1c"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a505ecc0642f52e7c65afb02cc6181377d833b7df0994ecde15943b18d0fa89c"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a28fe28c359835f3be20c89efd517b35e8f97dbb2ca09c6cf0d9ac07f62d7ef6"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:41a916d815a3a23cb7fff8d11ad0c9b93369ac074e91e428075e088fe57d5358"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:842c49dd584aedd75c2ee05f6c950730c3ffcddd21c5824ed0f820808387e1e3"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-win32.whl", hash = "sha256:6b602e3351f59f3999e9fb8b87e5b95cb2faab6a6ecdb482382ac6fdfbee5266"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-win_amd64.whl", hash = "sha256:6400b22e4e41cc27623a9a75630b7719579cd9a3a2027bcf16ad5aaa9a7806c0"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:dd4ed12a775f2cde4519f4267d3601990a97d8ecde5c944ab06bfd6e8e8ea177"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b7778a205f956755e05721eebf9f11a6ac18b2409bff5db53ce5fe7ede79831"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08d9396a2a38e672133266b31ed39b2b1f2b5ec712b5bff5e08033970563316a"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e93978993a2ad0af43f132be3ea8805f56b2f2cd223403ec28d3e7d5c6d39ed1"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-win32.whl", hash = "sha256:0566a6e90951590c0307c75f9176597c88ef4be2724958ca1d28e8ae05ec8822"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-win_amd64.whl", hash = "sha256:0b08a53e40b34205acfeb5328b832f44437956d673a6c09fce55c66ab0e54916"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:33a1e86abad782e90976de36150d910748b58e02cd7d35680d441f9a76806c18"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ed67aae8cde4d32aacbdba4f7f38183d14443b714498eada5e5a7a37769c0b7"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ebd69365717becaa1b618220a3df97f7c08aa68e759491de516d1c3667bba54"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0cd2d5c7ea96d3230cb20acac3d89de3b593339c1447b4d64bfcf4eac1110"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-win32.whl", hash = "sha256:c211e8ec81522ce87b0b39f0cf0712c998d4305a030459a0e115a2b3dc71598f"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-win_amd64.whl", hash = "sha256:9a1df8c93a0dd9cef0839917f0c6c49f46c75810cf8852be49884da4a7de3c59"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:1b38db2417b9f7005d6ceba7ce2a526bf10e3f6f635c0f163e6ed6a42b5b62b2"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e37621b37c73b034997b5116678862f38ee70e5a054821c7b19d0e55df270dec"}, - {file = 
"SQLAlchemy-1.4.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:91cd87d1de0111eaca11ccc3d31af441c753fa2bc22df72e5009cfb0a1af5b03"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90fe429285b171bcc252e21515703bdc2a4721008d1f13aa5b7150336f8a8493"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-win32.whl", hash = "sha256:6003771ea597346ab1e97f2f58405c6cacbf6a308af3d28a9201a643c0ac7bb3"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-win_amd64.whl", hash = "sha256:9ebe49c3960aa2219292ea2e5df6acdc425fc828f2f3d50b4cfae1692bcb5f02"}, - {file = "SQLAlchemy-1.4.25.tar.gz", hash = "sha256:1adf3d25e2e33afbcd48cfad8076f9378793be43e7fec3e4334306cac6bec138"}, -] -starlette = [ - {file = "starlette-0.17.1-py3-none-any.whl", hash = "sha256:26a18cbda5e6b651c964c12c88b36d9898481cd428ed6e063f5f29c418f73050"}, - {file = "starlette-0.17.1.tar.gz", hash = "sha256:57eab3cc975a28af62f6faec94d355a410634940f10b30d68d31cb5ec1b44ae8"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -ulid-py = [ - {file = "ulid-py-1.1.0.tar.gz", hash = "sha256:dc6884be91558df077c3011b9fb0c87d1097cb8fc6534b11f310161afd5738f0"}, - {file = "ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -uvicorn = [ - {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"}, - {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"}, -] -webencodings = [ +files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "5406c84bd8bd69384b125fbcb104f855df4530427a9568b72f5d623853d5b593" diff --git a/pyproject.toml b/pyproject.toml index 60d94b3..623ed2d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,21 +6,20 @@ authors = ["ducklet "] license = "LOL" [tool.poetry.dependencies] -python = "^3.10" +python = "^3.11" requests = "^2.25.1" beautifulsoup4 = "^4.9.3" html5lib = "^1.1" -starlette = "^0.17.0" +starlette = "^0.23.1" ulid-py = "^1.1.0" -databases = {extras = ["sqlite"], version = "^0.6.1"} -toml = "^0.10.2" -uvicorn = 
"^0.19.0" +databases = {extras = ["sqlite"], version = "^0.7.0"} +uvicorn = "^0.20.0" -[tool.poetry.group.fixes.dependencies] -# `databases` is having issues with new versions of SQLAlchemy 1.4, -# and `greenlet` is also always a pain. -SQLAlchemy = "1.4.25" -greenlet = "1.1.2" +# [tool.poetry.group.fixes.dependencies] +# # `databases` is having issues with new versions of SQLAlchemy 1.4, +# # and `greenlet` is also always a pain. +# SQLAlchemy = "1.4.25" +# greenlet = "1.1.2" [tool.poetry.group.dev] optional = true @@ -37,4 +36,4 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.pyright] -pythonVersion = "3.10" +pythonVersion = "3.11" diff --git a/unwind/config.py b/unwind/config.py index 6382cf5..6cc255a 100644 --- a/unwind/config.py +++ b/unwind/config.py @@ -1,8 +1,7 @@ import os +import tomllib from pathlib import Path -import toml - datadir = Path(os.getenv("UNWIND_DATA") or "./data") cachedir = ( Path(cachedir) @@ -14,7 +13,8 @@ loglevel = os.getenv("UNWIND_LOGLEVEL") or ("DEBUG" if debug else "INFO") storage_path = os.getenv("UNWIND_STORAGE", datadir / "db.sqlite") config_path = os.getenv("UNWIND_CONFIG", datadir / "config.toml") -_config = toml.load(config_path) +with open(config_path, "rb") as fd: + _config = tomllib.load(fd) api_base = _config["api"].get("base", "/api/") api_cors = _config["api"].get("cors", "*") From 3320d53eda370ab3c981d3a15350806edc1ccae2 Mon Sep 17 00:00:00 2001 From: ducklet Date: Thu, 2 Feb 2023 23:46:02 +0100 Subject: [PATCH 02/25] use native union type syntax --- unwind/db.py | 12 ++++++------ unwind/imdb.py | 3 +-- unwind/imdb_import.py | 10 +++++----- unwind/models.py | 29 ++++++++++++++--------------- unwind/request.py | 6 +++--- unwind/types.py | 4 ++-- unwind/web.py | 6 +++--- unwind/web_models.py | 18 +++++++++--------- 8 files changed, 43 insertions(+), 45 deletions(-) diff --git a/unwind/db.py b/unwind/db.py index 13217e9..bff8c20 100644 --- a/unwind/db.py +++ b/unwind/db.py @@ -4,7 +4,7 @@ import logging import re import threading from pathlib import Path -from typing import Any, Iterable, Literal, Optional, Type, TypeVar, Union +from typing import Any, Iterable, Literal, Type, TypeVar import sqlalchemy from databases import Database @@ -26,7 +26,7 @@ from .types import ULID log = logging.getLogger(__name__) T = TypeVar("T") -_shared_connection: Optional[Database] = None +_shared_connection: Database | None = None async def open_connection_pool() -> None: @@ -131,7 +131,7 @@ async def apply_db_patches(db: Database): await db.execute("vacuum") -async def get_import_progress() -> Optional[Progress]: +async def get_import_progress() -> Progress | None: """Return the latest import progress.""" return await get(Progress, type="import-imdb-movies", order_by="started DESC") @@ -244,7 +244,7 @@ ModelType = TypeVar("ModelType") async def get( model: Type[ModelType], *, order_by: str = None, **kwds -) -> Optional[ModelType]: +) -> ModelType | None: """Load a model instance from the database. Passing `kwds` allows to filter the instance to load. 
You have to encode the @@ -415,7 +415,7 @@ async def find_ratings( limit_rows: int = 10, user_ids: Iterable[str] = [], ): - values: dict[str, Union[int, str]] = { + values: dict[str, int | str] = { "limit_rows": limit_rows, } @@ -598,7 +598,7 @@ async def find_movies( include_unrated: bool = False, user_ids: list[ULID] = [], ) -> Iterable[tuple[Movie, list[Rating]]]: - values: dict[str, Union[int, str]] = { + values: dict[str, int | str] = { "limit_rows": limit_rows, "skip_rows": skip_rows, } diff --git a/unwind/imdb.py b/unwind/imdb.py index e541277..9288e6f 100644 --- a/unwind/imdb.py +++ b/unwind/imdb.py @@ -2,7 +2,6 @@ import logging import re from collections import namedtuple from datetime import datetime -from typing import Optional, Tuple from urllib.parse import urljoin from . import db @@ -153,7 +152,7 @@ def movie_and_rating_from_item(item) -> tuple[Movie, Rating]: ForgedRequest = namedtuple("ForgedRequest", "url headers") -async def parse_page(url) -> Tuple[list[Rating], Optional[str]]: +async def parse_page(url) -> tuple[list[Rating], str | None]: ratings = [] soup = soup_from_url(url) diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index 45360aa..61a892c 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -4,7 +4,7 @@ import logging from dataclasses import dataclass, fields from datetime import datetime, timezone from pathlib import Path -from typing import Generator, Literal, Optional, Type, TypeVar, overload +from typing import Generator, Literal, Type, TypeVar, overload from . import config, db, request from .db import add_or_update_many_movies @@ -27,10 +27,10 @@ class BasicRow: primaryTitle: str originalTitle: str isAdult: bool - startYear: Optional[int] - endYear: Optional[int] - runtimeMinutes: Optional[int] - genres: Optional[set[str]] + startYear: int | None + endYear: int | None + runtimeMinutes: int | None + genres: set[str] | None @classmethod def from_row(cls, row): diff --git a/unwind/models.py b/unwind/models.py index 37cd48d..674337d 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -9,7 +9,6 @@ from typing import ( ClassVar, Container, Literal, - Optional, Type, TypeVar, Union, @@ -25,7 +24,7 @@ JSONObject = dict[str, JSON] T = TypeVar("T") -def annotations(tp: Type) -> Optional[tuple]: +def annotations(tp: Type) -> tuple | None: return tp.__metadata__ if hasattr(tp, "__metadata__") else None @@ -61,7 +60,7 @@ def is_optional(tp: Type) -> bool: return len(args) == 2 and type(None) in args -def optional_type(tp: Type) -> Optional[Type]: +def optional_type(tp: Type) -> Type | None: """Return the wrapped type from an optional type. For example this will return `int` for `Optional[int]`. 
@@ -206,7 +205,7 @@ class Progress: type: str = None state: str = None started: datetime = field(default_factory=utcnow) - stopped: Optional[str] = None + stopped: str | None = None @property def _state(self) -> dict: @@ -243,15 +242,15 @@ class Movie: id: ULID = field(default_factory=ULID) title: str = None # canonical title (usually English) - original_title: Optional[ - str - ] = None # original title (usually transscribed to latin script) + original_title: str | None = ( + None # original title (usually transscribed to latin script) + ) release_year: int = None # canonical release date media_type: str = None imdb_id: str = None - imdb_score: Optional[int] = None # range: [0,100] - imdb_votes: Optional[int] = None - runtime: Optional[int] = None # minutes + imdb_score: int | None = None # range: [0,100] + imdb_votes: int | None = None + runtime: int | None = None # minutes genres: set[str] = None created: datetime = field(default_factory=utcnow) updated: datetime = field(default_factory=utcnow) @@ -292,7 +291,7 @@ dataclass containing the ID of the linked data. The contents of the Relation are ignored or discarded when using `asplain`, `fromplain`, and `validate`. """ -Relation = Annotated[Optional[T], _RelationSentinel] +Relation = Annotated[T | None, _RelationSentinel] @dataclass @@ -309,8 +308,8 @@ class Rating: score: int = None # range: [0,100] rating_date: datetime = None - favorite: Optional[bool] = None - finished: Optional[bool] = None + favorite: bool | None = None + finished: bool | None = None def __eq__(self, other): """Return wether two Ratings are equal. @@ -342,11 +341,11 @@ class User: secret: str = None groups: list[dict[str, str]] = field(default_factory=list) - def has_access(self, group_id: Union[ULID, str], access: Access = "r"): + def has_access(self, group_id: ULID | str, access: Access = "r"): group_id = group_id if isinstance(group_id, str) else str(group_id) return any(g["id"] == group_id and access == g["access"] for g in self.groups) - def set_access(self, group_id: Union[ULID, str], access: Access): + def set_access(self, group_id: ULID | str, access: Access): group_id = group_id if isinstance(group_id, str) else str(group_id) for g in self.groups: if g["id"] == group_id: diff --git a/unwind/request.py b/unwind/request.py index 81f9f29..0b6e07c 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -11,7 +11,7 @@ from hashlib import md5 from pathlib import Path from random import random from time import sleep, time -from typing import Callable, Optional, Union +from typing import Callable import bs4 import requests @@ -142,7 +142,7 @@ class RedirectError(RuntimeError): super().__init__(f"Redirected: {from_url} -> {to_url}") -def cache_path(req) -> Optional[Path]: +def cache_path(req) -> Path | None: if not config.cachedir: return sig = repr(req.url) # + repr(sorted(req.headers.items())) @@ -215,7 +215,7 @@ def last_modified_from_file(path: Path): def download( url: str, - file_path: Union[Path, str] = None, + file_path: Path | str = None, *, replace_existing: bool = None, only_if_newer: bool = False, diff --git a/unwind/types.py b/unwind/types.py index a54e0ec..94c0e00 100644 --- a/unwind/types.py +++ b/unwind/types.py @@ -1,5 +1,5 @@ import re -from typing import Union, cast +from typing import cast import ulid from ulid.hints import Buffer @@ -16,7 +16,7 @@ class ULID(ulid.ULID): _pattern = re.compile(r"^[0-9A-HJKMNP-TV-Z]{26}$") - def __init__(self, buffer: Union[Buffer, ulid.ULID, str, None] = None): + def __init__(self, buffer: Buffer | ulid.ULID | 
str | None = None): if isinstance(buffer, str): if not self._pattern.search(buffer): raise ValueError("Invalid ULID.") diff --git a/unwind/web.py b/unwind/web.py index e194c10..b8705a1 100644 --- a/unwind/web.py +++ b/unwind/web.py @@ -2,7 +2,7 @@ import asyncio import logging import secrets from json.decoder import JSONDecodeError -from typing import Literal, Optional, overload +from typing import Literal, overload from starlette.applications import Starlette from starlette.authentication import ( @@ -97,7 +97,7 @@ def yearcomp(s: str): return comp, int(s) -def as_int(x, *, max: int = None, min: Optional[int] = 1, default: int = None): +def as_int(x, *, max: int = None, min: int | None = 1, default: int = None): try: if not isinstance(x, int): x = int(x) @@ -158,7 +158,7 @@ def is_admin(request): return "admin" in request.auth.scopes -async def auth_user(request) -> Optional[User]: +async def auth_user(request) -> User | None: if not isinstance(request.user, AuthedUser): return diff --git a/unwind/web_models.py b/unwind/web_models.py index 06bcb8c..e514c5f 100644 --- a/unwind/web_models.py +++ b/unwind/web_models.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Container, Iterable, Optional +from typing import Container, Iterable from . import imdb, models @@ -10,14 +10,14 @@ Score100 = int # [0, 100] @dataclass class Rating: canonical_title: str - imdb_score: Optional[Score100] - imdb_votes: Optional[int] + imdb_score: Score100 | None + imdb_votes: int | None media_type: str movie_imdb_id: str - original_title: Optional[str] + original_title: str | None release_year: int - user_id: Optional[str] - user_score: Optional[Score100] + user_id: str | None + user_score: Score100 | None @classmethod def from_movie(cls, movie: models.Movie, *, rating: models.Rating = None): @@ -37,11 +37,11 @@ class Rating: @dataclass class RatingAggregate: canonical_title: str - imdb_score: Optional[Score100] - imdb_votes: Optional[int] + imdb_score: Score100 | None + imdb_votes: int | None link: URL media_type: str - original_title: Optional[str] + original_title: str | None user_scores: list[Score100] year: int From 418116afac22246876cc9fde9f6358fd22140a43 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 01:12:09 +0100 Subject: [PATCH 03/25] improve typing correctness --- unwind/db.py | 18 ++++++++++-------- unwind/imdb_import.py | 17 +++++++++-------- unwind/models.py | 2 +- unwind/request.py | 18 ++++++++++-------- unwind/utils.py | 5 ++++- unwind/web.py | 15 ++++++++++----- unwind/web_models.py | 2 +- 7 files changed, 45 insertions(+), 32 deletions(-) diff --git a/unwind/db.py b/unwind/db.py index bff8c20..e32d4c0 100644 --- a/unwind/db.py +++ b/unwind/db.py @@ -136,7 +136,7 @@ async def get_import_progress() -> Progress | None: return await get(Progress, type="import-imdb-movies", order_by="started DESC") -async def stop_import_progress(*, error: BaseException = None): +async def stop_import_progress(*, error: BaseException | None = None): """Stop the current import. If an error is given, it will be logged to the progress state. @@ -176,6 +176,8 @@ async def set_import_progress(progress: float) -> Progress: else: await add(current) + return current + _lock = threading.Lock() _prelock = threading.Lock() @@ -243,7 +245,7 @@ ModelType = TypeVar("ModelType") async def get( - model: Type[ModelType], *, order_by: str = None, **kwds + model: Type[ModelType], *, order_by: str | None = None, **kwds ) -> ModelType | None: """Load a model instance from the database. 
@@ -406,12 +408,12 @@ def sql_escape(s: str, char="#"): async def find_ratings( *, - title: str = None, - media_type: str = None, + title: str | None = None, + media_type: str | None = None, exact: bool = False, ignore_tv_episodes: bool = False, include_unrated: bool = False, - yearcomp: tuple[Literal["<", "=", ">"], int] = None, + yearcomp: tuple[Literal["<", "=", ">"], int] | None = None, limit_rows: int = 10, user_ids: Iterable[str] = [], ): @@ -588,11 +590,11 @@ async def ratings_for_movies( async def find_movies( *, - title: str = None, - media_type: str = None, + title: str | None = None, + media_type: str | None = None, exact: bool = False, ignore_tv_episodes: bool = False, - yearcomp: tuple[Literal["<", "=", ">"], int] = None, + yearcomp: tuple[Literal["<", "=", ">"], int] | None = None, limit_rows: int = 10, skip_rows: int = 0, include_unrated: bool = False, diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index 61a892c..3557993 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -100,7 +100,7 @@ title_types = { } -def gz_mtime(path) -> datetime: +def gz_mtime(path: Path) -> datetime: """Return the timestamp of the compressed file.""" g = gzip.GzipFile(path, "rb") g.peek(1) # start reading the file to fill the timestamp field @@ -108,7 +108,7 @@ def gz_mtime(path) -> datetime: return datetime.fromtimestamp(g.mtime).replace(tzinfo=timezone.utc) -def count_lines(path) -> int: +def count_lines(path: Path) -> int: i = 0 one_mb = 2 ** 20 @@ -124,20 +124,21 @@ def count_lines(path) -> int: @overload def read_imdb_tsv( - path, row_type, *, unpack: Literal[False] + path: Path, row_type, *, unpack: Literal[False] ) -> Generator[list[str], None, None]: ... @overload def read_imdb_tsv( - path, row_type: Type[T], *, unpack: Literal[True] = True + path: Path, row_type: Type[T], *, unpack: Literal[True] = True ) -> Generator[T, None, None]: ... -def read_imdb_tsv(path, row_type, *, unpack=True): +def read_imdb_tsv(path: Path, row_type, *, unpack=True): with gzip.open(path, "rt", newline="") as f: + rows = csv.reader(f, delimiter="\t", quoting=csv.QUOTE_NONE) # skip header line @@ -161,7 +162,7 @@ def read_imdb_tsv(path, row_type, *, unpack=True): raise -def read_ratings(path): +def read_ratings(path: Path): mtime = gz_mtime(path) rows = read_imdb_tsv(path, RatingRow) @@ -171,13 +172,13 @@ def read_ratings(path): yield m -def read_ratings_as_mapping(path): +def read_ratings_as_mapping(path: Path): """Optimized function to quickly load all ratings.""" rows = read_imdb_tsv(path, RatingRow, unpack=False) return {r[0]: (round(100 * (float(r[1]) - 1) / 9), int(r[2])) for r in rows} -def read_basics(path): +def read_basics(path: Path): mtime = gz_mtime(path) rows = read_imdb_tsv(path, BasicRow) diff --git a/unwind/models.py b/unwind/models.py index 674337d..70ffe26 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -91,7 +91,7 @@ def _id(x: T) -> T: def asplain( - o: object, *, filter_fields: Container[str] = None, serialize: bool = False + o: object, *, filter_fields: Container[str] | None = None, serialize: bool = False ) -> dict[str, Any]: """Return the given model instance as `dict` with JSON compatible plain datatypes. 
diff --git a/unwind/request.py b/unwind/request.py index 0b6e07c..3b78872 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -5,7 +5,7 @@ import os import tempfile from collections import deque from contextlib import contextmanager -from dataclasses import dataclass +from dataclasses import dataclass, field from functools import wraps from hashlib import md5 from pathlib import Path @@ -75,10 +75,10 @@ def Session() -> requests.Session: def throttle( - times: int, per_seconds: float, jitter: Callable[[], float] = None + times: int, per_seconds: float, jitter: Callable[[], float] | None = None ) -> Callable[[Callable], Callable]: - calls: Deque[float] = deque(maxlen=times) + calls: deque[float] = deque(maxlen=times) if jitter is None: jitter = lambda: 0.0 @@ -128,7 +128,7 @@ class CachedResponse: status_code: int text: str url: str - headers: dict[str, str] = None + headers: dict[str, str] = field(default_factory=dict) def json(self): return json.loads(self.text) @@ -215,17 +215,19 @@ def last_modified_from_file(path: Path): def download( url: str, - file_path: Path | str = None, + file_path: Path | str | None = None, *, - replace_existing: bool = None, + replace_existing: bool | None = None, only_if_newer: bool = False, - timeout: float = None, + timeout: float | None = None, verify_ssl: bool = True, chunk_callback=None, response_callback=None, -): +) -> bytes | None: """Download a file. + If `file_path` is `None` return the remote content, otherwise write the + content to the given file path. Existing files will not be overwritten unless `replace_existing` is set. Setting `only_if_newer` will check if the remote file is newer than the local file, otherwise the download will be aborted. diff --git a/unwind/utils.py b/unwind/utils.py index 012d1fb..efe9f17 100644 --- a/unwind/utils.py +++ b/unwind/utils.py @@ -17,7 +17,10 @@ def b64padded(s: str) -> str: def phc_scrypt( - secret: bytes, *, salt: bytes = None, params: dict[Literal["n", "r", "p"], int] = {} + secret: bytes, + *, + salt: bytes | None = None, + params: dict[Literal["n", "r", "p"], int] = {}, ) -> str: """Return the scrypt expanded secret in PHC string format. diff --git a/unwind/web.py b/unwind/web.py index b8705a1..dbe39bc 100644 --- a/unwind/web.py +++ b/unwind/web.py @@ -85,11 +85,14 @@ def truthy(s: str): return bool(s) and s.lower() in {"1", "yes", "true"} -def yearcomp(s: str): +_Yearcomp = Literal["<", "=", ">"] + + +def yearcomp(s: str) -> tuple[_Yearcomp, int] | None: if not s: return - comp: Literal["<", "=", ">"] = "=" + comp: _Yearcomp = "=" if (prefix := s[0]) in "<=>": comp = prefix # type: ignore s = s[len(prefix) :] @@ -97,7 +100,9 @@ def yearcomp(s: str): return comp, int(s) -def as_int(x, *, max: int = None, min: int | None = 1, default: int = None): +def as_int( + x, *, max: int | None = None, min: int | None = 1, default: int | None = None +) -> int: try: if not isinstance(x, int): x = int(x) @@ -135,7 +140,7 @@ async def json_from_body(request, keys: list[str]) -> list: ... 
-async def json_from_body(request, keys: list[str] = None): +async def json_from_body(request, keys: list[str] | None = None): if not await request.body(): data = {} @@ -176,7 +181,7 @@ async def auth_user(request) -> User | None: _routes = [] -def route(path: str, *, methods: list[str] = None, **kwds): +def route(path: str, *, methods: list[str] | None = None, **kwds): def decorator(func): r = Route(path, func, methods=methods, **kwds) _routes.append(r) diff --git a/unwind/web_models.py b/unwind/web_models.py index e514c5f..6e83e1d 100644 --- a/unwind/web_models.py +++ b/unwind/web_models.py @@ -20,7 +20,7 @@ class Rating: user_score: Score100 | None @classmethod - def from_movie(cls, movie: models.Movie, *, rating: models.Rating = None): + def from_movie(cls, movie: models.Movie, *, rating: models.Rating | None = None): return cls( canonical_title=movie.title, imdb_score=movie.imdb_score, From 8e1988eea2a6088bf21f163025dee72cb1f49345 Mon Sep 17 00:00:00 2001 From: ducklet Date: Fri, 3 Feb 2023 22:10:11 +0100 Subject: [PATCH 04/25] add Autoflake linting --- poetry.lock | 29 ++++++++++++++++++++++++++++- pyproject.toml | 17 +++++++++++------ scripts/lint-py | 3 ++- 3 files changed, 41 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index a3c5a5b..4f60174 100644 --- a/poetry.lock +++ b/poetry.lock @@ -52,6 +52,21 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope.interface"] tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +[[package]] +name = "autoflake" +version = "2.0.1" +description = "Removes unused imports and unused variables" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "autoflake-2.0.1-py3-none-any.whl", hash = "sha256:143b0843667734af53532c443e950c787316b9b1155b2273558260b44836e8e4"}, + {file = "autoflake-2.0.1.tar.gz", hash = "sha256:1ce520131b7f396915242fe91e57221f4d42408529bbe3ae93adafed286591e0"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" + [[package]] name = "beautifulsoup4" version = "4.11.2" @@ -515,6 +530,18 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pyflakes" +version = "3.0.1" +description = "passive checker of Python programs" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] + [[package]] name = "pyright" version = "1.1.292" @@ -806,4 +833,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "5406c84bd8bd69384b125fbcb104f855df4530427a9568b72f5d623853d5b593" +content-hash = "38adf8f83af28cc5d2fcc4a17dbfed2ec1026d7c297a3af8be350669790635cb" diff --git a/pyproject.toml b/pyproject.toml index 623ed2d..027bf45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,12 +14,7 @@ starlette = "^0.23.1" ulid-py = "^1.1.0" databases = {extras = ["sqlite"], version = "^0.7.0"} uvicorn = "^0.20.0" - -# [tool.poetry.group.fixes.dependencies] -# # `databases` is having issues with new versions of SQLAlchemy 1.4, -# # and `greenlet` is also always a 
pain. -# SQLAlchemy = "1.4.25" -# greenlet = "1.1.2" +autoflake = "^2.0.1" [tool.poetry.group.dev] optional = true @@ -37,3 +32,13 @@ build-backend = "poetry.core.masonry.api" [tool.pyright] pythonVersion = "3.11" + +[tool.isort] +profile = "black" + +[tool.autoflake] +remove-duplicate-keys = true +remove-unused-variables = true +remove-all-unused-imports = true +ignore-init-module-imports = true +ignore-pass-after-docstring = true diff --git a/scripts/lint-py b/scripts/lint-py index 84a4c65..b2040d7 100755 --- a/scripts/lint-py +++ b/scripts/lint-py @@ -4,6 +4,7 @@ cd "$RUN_DIR" [ -z "${DEBUG:-}" ] || set -x -isort --profile black unwind +autoflake --quiet --check --recursive unwind +isort unwind black unwind pyright From 8a8bfce89de23d987386a35b659532bbac373788 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 01:12:50 +0100 Subject: [PATCH 05/25] apply Black v23.1.0 formatting changes --- unwind/db.py | 1 - unwind/imdb.py | 5 ----- unwind/imdb_import.py | 5 +---- unwind/models.py | 3 --- unwind/request.py | 6 +----- unwind/utils.py | 2 +- unwind/web.py | 16 ---------------- 7 files changed, 3 insertions(+), 35 deletions(-) diff --git a/unwind/db.py b/unwind/db.py index e32d4c0..4a4fc68 100644 --- a/unwind/db.py +++ b/unwind/db.py @@ -119,7 +119,6 @@ async def apply_db_patches(db: Database): raise RuntimeError("No statement found.") async with db.transaction(): - for query in queries: await db.execute(query) diff --git a/unwind/imdb.py b/unwind/imdb.py index 9288e6f..5b56b3f 100644 --- a/unwind/imdb.py +++ b/unwind/imdb.py @@ -35,12 +35,10 @@ log = logging.getLogger(__name__) async def refresh_user_ratings_from_imdb(stop_on_dupe=True): - with session() as s: s.headers["Accept-Language"] = "en-US, en;q=0.5" for user in await db.get_all(User): - log.info("⚡️ Loading data for %s ...", user.name) try: @@ -97,7 +95,6 @@ find_movie_id = re.compile(r"/title/(?Ptt\d+)/").search def movie_and_rating_from_item(item) -> tuple[Movie, Rating]: - genres = (genre := item.find("span", "genre")) and genre.string or "" movie = Movie( title=item.h3.a.string.strip(), @@ -169,7 +166,6 @@ async def parse_page(url) -> tuple[list[Rating], str | None]: items = soup.find_all("div", "lister-item-content") for i, item in enumerate(items): - try: movie, rating = movie_and_rating_from_item(item) except Exception as err: @@ -199,7 +195,6 @@ async def load_ratings(user_id): next_url = user_ratings_url(user_id) while next_url: - ratings, next_url = await parse_page(next_url) for i, rating in enumerate(ratings): diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index 3557993..3991a78 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -111,11 +111,10 @@ def gz_mtime(path: Path) -> datetime: def count_lines(path: Path) -> int: i = 0 - one_mb = 2 ** 20 + one_mb = 2**20 buf_size = 8 * one_mb # 8 MiB seems to give a good read/process performance. 
with gzip.open(path, "rt") as f: - while buf := f.read(buf_size): i += buf.count("\n") @@ -138,7 +137,6 @@ def read_imdb_tsv( def read_imdb_tsv(path: Path, row_type, *, unpack=True): with gzip.open(path, "rt", newline="") as f: - rows = csv.reader(f, delimiter="\t", quoting=csv.QUOTE_NONE) # skip header line @@ -205,7 +203,6 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): chunk = [] for i, m in enumerate(read_basics(basics_path)): - perc = 100 * i / total if perc >= perc_next_report: await db.set_import_progress(perc) diff --git a/unwind/models.py b/unwind/models.py index 70ffe26..8922e5b 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -41,7 +41,6 @@ def fields(class_or_instance): # XXX this might be a little slow (not sure), if so, memoize for f in _fields(class_or_instance): - if f.name == "_is_lazy": continue @@ -108,7 +107,6 @@ def asplain( d: JSONObject = {} for f in fields(o): - if filter_fields is not None and f.name not in filter_fields: continue @@ -156,7 +154,6 @@ def fromplain(cls: Type[T], d: dict[str, Any], *, serialized: bool = False) -> T dd: JSONObject = {} for f in fields(cls): - target = f.type otype = optional_type(f.type) is_opt = otype is not None diff --git a/unwind/request.py b/unwind/request.py index 3b78872..c4866ea 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -77,7 +77,6 @@ def Session() -> requests.Session: def throttle( times: int, per_seconds: float, jitter: Callable[[], float] | None = None ) -> Callable[[Callable], Callable]: - calls: deque[float] = deque(maxlen=times) if jitter is None: @@ -86,7 +85,6 @@ def throttle( def decorator(func: Callable) -> Callable: @wraps(func) def inner(*args, **kwds): - # clean up while calls: if calls[0] + per_seconds > time(): @@ -151,7 +149,6 @@ def cache_path(req) -> Path | None: @throttle(1, 1, random) def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: - req = s.prepare_request(requests.Request("GET", url, *args, **kwds)) cachefile = cache_path(req) if config.debug else None @@ -244,7 +241,6 @@ def download( raise FileExistsError(23, "Would replace existing file", str(file_path)) with session() as s: - headers = {} if file_exists and only_if_newer: assert file_path @@ -300,7 +296,7 @@ def download( tempfd, tempfile_path = tempfile.mkstemp( dir=tempdir, prefix=f".download-{file_path.name}." 
) - one_mb = 2 ** 20 + one_mb = 2**20 chunk_size = 8 * one_mb try: log.debug("💾 Writing to temp file %s ...", tempfile_path) diff --git a/unwind/utils.py b/unwind/utils.py index efe9f17..f253bde 100644 --- a/unwind/utils.py +++ b/unwind/utils.py @@ -33,7 +33,7 @@ def phc_scrypt( if salt is None: salt = secrets.token_bytes(16) - n = params.get("n", 2 ** 14) # CPU/Memory cost factor + n = params.get("n", 2**14) # CPU/Memory cost factor r = params.get("r", 8) # block size p = params.get("p", 1) # parallelization factor # maxmem = 2 * 128 * n * r * p diff --git a/unwind/web.py b/unwind/web.py index dbe39bc..cab9a9e 100644 --- a/unwind/web.py +++ b/unwind/web.py @@ -195,7 +195,6 @@ route.registered = _routes @route("/groups/{group_id}/ratings") async def get_ratings_for_group(request): - group_id = as_ulid(request.path_params["group_id"]) group = await db.get(Group, id=str(group_id)) @@ -256,7 +255,6 @@ def not_implemented(): @route("/movies") @requires(["authenticated"]) async def list_movies(request): - params = request.query_params user = await auth_user(request) @@ -324,7 +322,6 @@ async def list_movies(request): @route("/movies", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_movie(request): - not_implemented() @@ -366,7 +363,6 @@ _import_lock = asyncio.Lock() @route("/movies/_reload_imdb", methods=["POST"]) @requires(["authenticated", "admin"]) async def load_imdb_movies(request): - params = request.query_params force = truthy(params.get("force")) @@ -389,7 +385,6 @@ async def load_imdb_movies(request): @route("/users") @requires(["authenticated", "admin"]) async def list_users(request): - users = await db.get_all(User) return JSONResponse([asplain(u) for u in users]) @@ -398,7 +393,6 @@ async def list_users(request): @route("/users", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_user(request): - name, imdb_id = await json_from_body(request, ["name", "imdb_id"]) # XXX restrict name @@ -420,7 +414,6 @@ async def add_user(request): @route("/users/{user_id}") @requires(["authenticated"]) async def show_user(request): - user_id = as_ulid(request.path_params["user_id"]) if is_admin(request): @@ -449,7 +442,6 @@ async def show_user(request): @route("/users/{user_id}", methods=["DELETE"]) @requires(["authenticated", "admin"]) async def remove_user(request): - user_id = as_ulid(request.path_params["user_id"]) user = await db.get(User, id=str(user_id)) @@ -467,7 +459,6 @@ async def remove_user(request): @route("/users/{user_id}", methods=["PATCH"]) @requires(["authenticated"]) async def modify_user(request): - user_id = as_ulid(request.path_params["user_id"]) if is_admin(request): @@ -515,7 +506,6 @@ async def modify_user(request): @route("/users/{user_id}/groups", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_group_to_user(request): - user_id = as_ulid(request.path_params["user_id"]) user = await db.get(User, id=str(user_id)) @@ -540,21 +530,18 @@ async def add_group_to_user(request): @route("/users/{user_id}/ratings") @requires(["private"]) async def ratings_for_user(request): - not_implemented() @route("/users/{user_id}/ratings", methods=["PUT"]) @requires("authenticated") async def set_rating_for_user(request): - not_implemented() @route("/users/_reload_ratings", methods=["POST"]) @requires(["authenticated", "admin"]) async def load_imdb_user_ratings(request): - ratings = [rating async for rating in imdb.refresh_user_ratings_from_imdb()] return JSONResponse({"new_ratings": [asplain(r) for r in ratings]}) @@ -563,7 +550,6 @@ 
async def load_imdb_user_ratings(request): @route("/groups") @requires(["authenticated", "admin"]) async def list_groups(request): - groups = await db.get_all(Group) return JSONResponse([asplain(g) for g in groups]) @@ -572,7 +558,6 @@ async def list_groups(request): @route("/groups", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_group(request): - (name,) = await json_from_body(request, ["name"]) # XXX restrict name @@ -586,7 +571,6 @@ async def add_group(request): @route("/groups/{group_id}/users", methods=["POST"]) @requires(["authenticated"]) async def add_user_to_group(request): - group_id = as_ulid(request.path_params["group_id"]) group = await db.get(Group, id=str(group_id)) From 8eabbf89d5b84eff9d58f26288ad6ff660020a62 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 01:13:17 +0100 Subject: [PATCH 06/25] add .git-blame-ignore-revs --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..3104b77 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Apply Black v23.1.0 formatting changes. +8a8bfce89de23d987386a35b659532bbac373788 From 758706baa20d574b6597644f61d4ea2124e4388c Mon Sep 17 00:00:00 2001 From: ducklet Date: Fri, 3 Feb 2023 23:44:09 +0100 Subject: [PATCH 07/25] switch from Requests to HTTPX --- poetry.lock | 183 ++++++++++++++-------------------------------- pyproject.toml | 2 +- unwind/request.py | 92 +++++++++-------------- 3 files changed, 92 insertions(+), 185 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4f60174..a5184f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -146,104 +146,6 @@ files = [ {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] -[[package]] -name = "charset-normalizer" -version = "3.0.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" -optional = false -python-versions = "*" -files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = 
"charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = 
"charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, -] - [[package]] name = "click" version = "8.1.3" @@ -405,6 +307,52 @@ chardet = ["chardet (>=2.2)"] genshi = ["genshi"] lxml = ["lxml"] +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "idna" version = "3.4" @@ -603,26 +551,22 @@ docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] -name = "requests" -version = "2.28.2" -description = "Python HTTP for Humans." 
+name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = "*" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] [package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +idna2008 = ["idna"] [[package]] name = "setuptools" @@ -782,23 +726,6 @@ files = [ {file = "ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987"}, ] -[[package]] -name = "urllib3" -version = "1.26.14" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - [[package]] name = "uvicorn" version = "0.20.0" @@ -833,4 +760,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "38adf8f83af28cc5d2fcc4a17dbfed2ec1026d7c297a3af8be350669790635cb" +content-hash = "05dc90985d69552657feca4adf2827f52a8e156eb77eda65cf6775357bed5472" diff --git a/pyproject.toml b/pyproject.toml index 027bf45..f357688 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,6 @@ license = "LOL" [tool.poetry.dependencies] python = "^3.11" -requests = "^2.25.1" beautifulsoup4 = "^4.9.3" html5lib = "^1.1" starlette = "^0.23.1" @@ -15,6 +14,7 @@ ulid-py = "^1.1.0" databases = {extras = ["sqlite"], version = "^0.7.0"} uvicorn = "^0.20.0" autoflake = "^2.0.1" +httpx = "^0.23.3" [tool.poetry.group.dev] optional = true diff --git a/unwind/request.py b/unwind/request.py index c4866ea..b3aed67 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -11,12 +11,10 @@ from hashlib import md5 from pathlib import Path from random import random from time import sleep, time -from typing import Callable +from typing import Callable, cast import bs4 -import requests -from requests.status_codes import codes -from urllib3.util.retry import Retry +import httpx from . 
import config @@ -26,25 +24,11 @@ if config.debug and config.cachedir: config.cachedir.mkdir(exist_ok=True) -def set_retries(s: requests.Session, n: int, backoff_factor: float = 0.2): - retry = ( - Retry( - total=n, - connect=n, - read=n, - status=n, - status_forcelist=Retry.RETRY_AFTER_STATUS_CODES, - backoff_factor=backoff_factor, - ) - if n - else Retry(0, read=False) - ) - for a in s.adapters.values(): - a.max_retries = retry - - _shared_session = None +_Session_T = httpx.Client +_Response_T = httpx.Response + @contextmanager def session(): @@ -61,20 +45,20 @@ def session(): yield _shared_session return - _shared_session = Session() + _shared_session = _Session() try: yield _shared_session finally: _shared_session = None -def Session() -> requests.Session: - s = requests.Session() - s.headers["User-Agent"] = "Mozilla/5.0 Gecko/20100101 unwind/20210506" +def _Session() -> _Session_T: + s = _Session_T() + s.headers["user-agent"] = "Mozilla/5.0 Gecko/20100101 unwind/20230203" return s -def throttle( +def _throttle( times: int, per_seconds: float, jitter: Callable[[], float] | None = None ) -> Callable[[Callable], Callable]: calls: deque[float] = deque(maxlen=times) @@ -116,12 +100,8 @@ def throttle( return decorator -class CachedStr(str): - is_cached = True - - @dataclass -class CachedResponse: +class _CachedResponse: is_cached = True status_code: int text: str @@ -132,7 +112,7 @@ class CachedResponse: return json.loads(self.text) -class RedirectError(RuntimeError): +class _RedirectError(RuntimeError): def __init__(self, from_url: str, to_url: str, is_cached=False): self.from_url = from_url self.to_url = to_url @@ -147,9 +127,9 @@ def cache_path(req) -> Path | None: return config.cachedir / md5(sig.encode()).hexdigest() -@throttle(1, 1, random) -def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: - req = s.prepare_request(requests.Request("GET", url, *args, **kwds)) +@_throttle(1, 1, random) +def _http_get(s: _Session_T, url: str, *args, **kwds) -> _Response_T: + req = s.build_request(method="GET", url=url, *args, **kwds) cachefile = cache_path(req) if config.debug else None @@ -159,17 +139,19 @@ def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: f"💾 loading {req.url} ({req.headers!a}) from cache {cachefile} ..." ) with cachefile.open() as fp: - resp = CachedResponse(**json.load(fp)) + resp = _CachedResponse(**json.load(fp)) if 300 <= resp.status_code <= 399: - raise RedirectError( + raise _RedirectError( from_url=resp.url, to_url=resp.headers["location"], is_cached=True ) - return resp + return cast(_Response_T, resp) log.debug(f"⚡️ loading {req.url} ({req.headers!a}) ...") - resp = s.send(req, allow_redirects=False, stream=True) + resp = s.send(req, follow_redirects=False, stream=True) resp.raise_for_status() + resp.read() # Download the response stream to allow `resp.text` access. + if cachefile: with cachefile.open("w") as fp: json.dump( @@ -184,7 +166,7 @@ def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: if resp.is_redirect: # Redirects could mean trouble, we need to stay on top of that! 
- raise RedirectError(from_url=resp.url, to_url=resp.headers["location"]) + raise _RedirectError(from_url=str(resp.url), to_url=resp.headers["location"]) return resp @@ -192,21 +174,21 @@ def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: def soup_from_url(url): """Return a BeautifulSoup instance from the contents for the given URL.""" with session() as s: - r = http_get(s, url) + r = _http_get(s, url) soup = bs4.BeautifulSoup(r.text, "html5lib") return soup -def last_modified_from_response(resp): - if last_mod := resp.headers.get("Last-Modified"): +def _last_modified_from_response(resp: _Response_T) -> float | None: + if last_mod := resp.headers.get("last-modified"): try: return email.utils.parsedate_to_datetime(last_mod).timestamp() except: log.exception("🐛 Received invalid value for Last-Modified: %s", last_mod) -def last_modified_from_file(path: Path): +def _last_modified_from_file(path: Path) -> float: return path.stat().st_mtime @@ -217,7 +199,6 @@ def download( replace_existing: bool | None = None, only_if_newer: bool = False, timeout: float | None = None, - verify_ssl: bool = True, chunk_callback=None, response_callback=None, ) -> bytes | None: @@ -244,17 +225,15 @@ def download( headers = {} if file_exists and only_if_newer: assert file_path - file_lastmod = last_modified_from_file(file_path) - headers["If-Modified-Since"] = email.utils.formatdate( + file_lastmod = _last_modified_from_file(file_path) + headers["if-modified-since"] = email.utils.formatdate( file_lastmod, usegmt=True ) - req = s.prepare_request(requests.Request("GET", url, headers=headers)) + req = s.build_request(method="GET", url=url, headers=headers, timeout=timeout) log.debug("⚡️ loading %s (%s) ...", req.url, req.headers) - resp = s.send( - req, allow_redirects=True, stream=True, timeout=timeout, verify=verify_ssl - ) + resp = s.send(req, follow_redirects=True, stream=True) if response_callback is not None: try: @@ -264,18 +243,19 @@ def download( log.debug("☕️ Response status: %s; headers: %s", resp.status_code, resp.headers) - resp.raise_for_status() - - if resp.status_code == codes.not_modified: + if resp.status_code == httpx.codes.NOT_MODIFIED: log.debug("✋ Remote file has not changed, skipping download.") return + resp.raise_for_status() + if file_path is None: + resp.read() # Download the response stream to allow `resp.content` access. return resp.content assert replace_existing is True - resp_lastmod = last_modified_from_response(resp) + resp_lastmod = _last_modified_from_response(resp) # Check Last-Modified in case the server ignored If-Modified-Since. # XXX also check Content-Length? 
@@ -300,7 +280,7 @@ def download( chunk_size = 8 * one_mb try: log.debug("💾 Writing to temp file %s ...", tempfile_path) - for chunk in resp.iter_content(chunk_size=chunk_size, decode_unicode=False): + for chunk in resp.iter_bytes(chunk_size): os.write(tempfd, chunk) if chunk_callback: try: From 0563d49dbc575a12c9a8885f85b8d2d2ea9aff3e Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 01:02:16 +0100 Subject: [PATCH 08/25] add async file download function --- unwind/request.py | 154 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 153 insertions(+), 1 deletion(-) diff --git a/unwind/request.py b/unwind/request.py index b3aed67..057cf7b 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -4,7 +4,7 @@ import logging import os import tempfile from collections import deque -from contextlib import contextmanager +from contextlib import asynccontextmanager, contextmanager from dataclasses import dataclass, field from functools import wraps from hashlib import md5 @@ -24,8 +24,10 @@ if config.debug and config.cachedir: config.cachedir.mkdir(exist_ok=True) +_shared_asession = None _shared_session = None +_ASession_T = httpx.AsyncClient _Session_T = httpx.Client _Response_T = httpx.Response @@ -58,6 +60,32 @@ def _Session() -> _Session_T: return s +@asynccontextmanager +async def asession(): + """Return the shared request session. + + The session is shared by all request functions and provides cookie + persistence and connection pooling. + Opening the session before making a request allows you to set headers + or change the retry behavior. + """ + global _shared_asession + + if _shared_asession: + yield _shared_asession + return + + _shared_asession = _ASession_T() + _shared_asession.headers[ + "user-agent" + ] = "Mozilla/5.0 Gecko/20100101 unwind/20230203" + try: + async with _shared_asession: + yield _shared_asession + finally: + _shared_asession = None + + def _throttle( times: int, per_seconds: float, jitter: Callable[[], float] | None = None ) -> Callable[[Callable], Callable]: @@ -298,3 +326,127 @@ def download( # Fix file attributes. if resp_lastmod is not None: os.utime(file_path, (resp_lastmod, resp_lastmod)) + + +async def adownload( + url: str, + *, + to_path: Path | str | None = None, + replace_existing: bool | None = None, + only_if_newer: bool = False, + timeout: float | None = None, + chunk_callback=None, + response_callback=None, +) -> bytes | None: + """Download a file. + + If `to_path` is `None` return the remote content, otherwise write the + content to the given file path. + Existing files will not be overwritten unless `replace_existing` is set. + Setting `only_if_newer` will check if the remote file is newer than the + local file, otherwise the download will be aborted. 
+ """ + if replace_existing is None: + replace_existing = only_if_newer + + file_exists = None + if to_path is not None: + to_path = Path(to_path) + + file_exists = to_path.exists() and to_path.stat().st_size + if file_exists and not replace_existing: + raise FileExistsError(23, "Would replace existing file", str(to_path)) + + async with asession() as s: + headers = {} + if file_exists and only_if_newer: + assert to_path + file_lastmod = _last_modified_from_file(to_path) + headers["if-modified-since"] = email.utils.formatdate( + file_lastmod, usegmt=True + ) + + req = s.build_request(method="GET", url=url, headers=headers, timeout=timeout) + + log.debug("⚡️ Loading %s (%a) ...", req.url, dict(req.headers)) + resp = await s.send(req, follow_redirects=True, stream=True) + + try: + if response_callback is not None: + try: + response_callback(resp) + except: + log.exception("🐛 Error in response callback.") + + log.debug( + "☕️ %s -> status: %s; headers: %a", + req.url, + resp.status_code, + dict(resp.headers), + ) + + if resp.status_code == httpx.codes.NOT_MODIFIED: + log.debug( + "✋ Remote file has not changed, skipping download: %s -> %a", + req.url, + to_path, + ) + return + + resp.raise_for_status() + + if to_path is None: + await resp.aread() # Download the response stream to allow `resp.content` access. + return resp.content + + resp_lastmod = _last_modified_from_response(resp) + + # Check Last-Modified in case the server ignored If-Modified-Since. + # XXX also check Content-Length? + if file_exists and only_if_newer and resp_lastmod is not None: + assert file_lastmod + + if resp_lastmod <= file_lastmod: + log.debug("✋ Local file is newer, skipping download: %a", req.url) + return + + # Create intermediate directories if necessary. + download_dir = to_path.parent + download_dir.mkdir(parents=True, exist_ok=True) + + # Write content to temp file. + tempdir = download_dir + tempfd, tempfile_path = tempfile.mkstemp( + dir=tempdir, prefix=f".download-{to_path.name}." + ) + one_mb = 2**20 + chunk_size = 8 * one_mb + try: + log.debug("💾 Writing to temp file %s ...", tempfile_path) + async for chunk in resp.aiter_bytes(chunk_size): + os.write(tempfd, chunk) + if chunk_callback: + try: + chunk_callback(chunk) + except: + log.exception("🐛 Error in chunk callback.") + finally: + os.close(tempfd) + + # Move downloaded file to destination. + if to_path.exists(): + log.debug("💾 Replacing existing file: %s", to_path) + else: + log.debug("💾 Move to destination: %s", to_path) + if replace_existing: + Path(tempfile_path).replace(to_path) + else: + Path(tempfile_path).rename(to_path) + + # Fix file attributes. + if resp_lastmod is not None: + log.debug("💾 Adjusting file timestamp: %s (%s)", to_path, resp_lastmod) + os.utime(to_path, (resp_lastmod, resp_lastmod)) + + finally: + await resp.aclose() From 7da3a094f1f2acd08c1f11a3b0132f6d04b6a5a2 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 01:03:12 +0100 Subject: [PATCH 09/25] add "download-imdb-dataset" command to CLI --- unwind/__main__.py | 27 ++++++++++++++++++++++++++- unwind/imdb_import.py | 25 +++++++++++++++++++------ 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/unwind/__main__.py b/unwind/__main__.py index 983ede7..e1689a9 100644 --- a/unwind/__main__.py +++ b/unwind/__main__.py @@ -6,7 +6,7 @@ from pathlib import Path from . 
import config from .db import close_connection_pool, open_connection_pool from .imdb import refresh_user_ratings_from_imdb -from .imdb_import import import_from_file +from .imdb_import import download_datasets, import_from_file log = logging.getLogger(__name__) @@ -31,6 +31,10 @@ async def run_import_imdb_dataset(basics_path: Path, ratings_path: Path): await close_connection_pool() +async def run_download_imdb_dataset(basics_path: Path, ratings_path: Path): + await download_datasets(basics_path=basics_path, ratings_path=ratings_path) + + def getargs(): parser = argparse.ArgumentParser() commands = parser.add_subparsers(required=True) @@ -55,6 +59,25 @@ def getargs(): "--ratings", metavar="ratings_file.tsv.gz", type=Path, required=True ) + parser_download_imdb_dataset = commands.add_parser( + "download-imdb-dataset", + help="Download IMDb datasets.", + description=""" + Download IMDb datasets. + """, + ) + parser_download_imdb_dataset.add_argument( + dest="mode", + action="store_const", + const="download-imdb-dataset", + ) + parser_download_imdb_dataset.add_argument( + "--basics", metavar="basics_file.tsv.gz", type=Path, required=True + ) + parser_download_imdb_dataset.add_argument( + "--ratings", metavar="ratings_file.tsv.gz", type=Path, required=True + ) + parser_load_user_ratings_from_imdb = commands.add_parser( "load-user-ratings-from-imdb", help="Load user ratings from imdb.com.", @@ -94,6 +117,8 @@ def main(): asyncio.run(run_load_user_ratings_from_imdb()) elif args.mode == "import-imdb-dataset": asyncio.run(run_import_imdb_dataset(args.basics, args.ratings)) + elif args.mode == "download-imdb-dataset": + asyncio.run(run_download_imdb_dataset(args.basics, args.ratings)) main() diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index 3991a78..7e55b62 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -1,3 +1,4 @@ +import asyncio import csv import gzip import logging @@ -236,7 +237,23 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): await db.set_import_progress(100) -async def load_from_web(*, force: bool = False): +async def download_datasets(*, basics_path: Path, ratings_path: Path) -> None: + """Download IMDb movie database dumps. + + See https://www.imdb.com/interfaces/ and https://datasets.imdbws.com/ for + more information on the IMDb database dumps. + """ + basics_url = "https://datasets.imdbws.com/title.basics.tsv.gz" + ratings_url = "https://datasets.imdbws.com/title.ratings.tsv.gz" + + async with request.asession(): + await asyncio.gather( + request.adownload(ratings_url, to_path=ratings_path, only_if_newer=True), + request.adownload(basics_url, to_path=basics_path, only_if_newer=True), + ) + + +async def load_from_web(*, force: bool = False) -> None: """Refresh the full IMDb movie database. The latest dumps are first downloaded and then imported into the database. 
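For orientation, a minimal sketch of how the new download entry point is meant to be driven, mirroring run_download_imdb_dataset() from the __main__.py hunk above; the file paths here are made up:

    import asyncio
    from pathlib import Path

    from unwind.imdb_import import download_datasets

    # Fetch both IMDb dumps concurrently; adownload() skips the transfer
    # when the local copy is already up to date (only_if_newer=True).
    asyncio.run(
        download_datasets(
            basics_path=Path("data/imdb/title.basics.tsv.gz"),
            ratings_path=Path("data/imdb/title.ratings.tsv.gz"),
        )
    )

The CLI reaches the same code path via asyncio.run(run_download_imdb_dataset(args.basics, args.ratings)).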
@@ -249,17 +266,13 @@ async def load_from_web(*, force: bool = False): await db.set_import_progress(0) try: - basics_url = "https://datasets.imdbws.com/title.basics.tsv.gz" - ratings_url = "https://datasets.imdbws.com/title.ratings.tsv.gz" ratings_file = config.datadir / "imdb/title.ratings.tsv.gz" basics_file = config.datadir / "imdb/title.basics.tsv.gz" ratings_mtime = ratings_file.stat().st_mtime if ratings_file.exists() else None bastics_mtime = basics_file.stat().st_mtime if basics_file.exists() else None - with request.session(): - request.download(ratings_url, ratings_file, only_if_newer=True) - request.download(basics_url, basics_file, only_if_newer=True) + await download_datasets(basics_path=basics_file, ratings_path=ratings_file) is_changed = ( ratings_mtime != ratings_file.stat().st_mtime From e84a6bc865a1f116f800567a24a85ed7b28b6fd2 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 12:46:30 +0100 Subject: [PATCH 10/25] fix support for union type expressions --- unwind/models.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/unwind/models.py b/unwind/models.py index 8922e5b..030b87e 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -3,6 +3,7 @@ from dataclasses import dataclass, field from dataclasses import fields as _fields from datetime import datetime, timezone from functools import partial +from types import UnionType from typing import ( Annotated, Any, @@ -52,7 +53,7 @@ def fields(class_or_instance): def is_optional(tp: Type) -> bool: """Return wether the given type is optional.""" - if get_origin(tp) is not Union: + if not isinstance(tp, UnionType) and get_origin(tp) is not Union: return False args = get_args(tp) @@ -66,7 +67,7 @@ def optional_type(tp: Type) -> Type | None: Since they're equivalent this also works for other optioning notations, like `Union[int, None]` and `int | None`. """ - if get_origin(tp) is not Union: + if not isinstance(tp, UnionType) and get_origin(tp) is not Union: return None args = get_args(tp) @@ -184,7 +185,8 @@ def validate(o: object) -> None: vtype = type(getattr(o, f.name)) if vtype is not f.type: if get_origin(f.type) is vtype or ( - get_origin(f.type) is Union and vtype in get_args(f.type) + (isinstance(f.type, UnionType) or get_origin(f.type) is Union) + and vtype in get_args(f.type) ): continue raise ValueError(f"Invalid value type: {f.name}: {vtype}") From 69643455a66adabfaa6cc3917233bfb9de21442f Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 14:12:36 +0100 Subject: [PATCH 11/25] IMDb import: fix progress reporting Because we calculated the percentage based on the number of lines we need to yield once per line, otherwise the count is off. 
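The fix is easier to see with a stripped-down stand-in for read_basics() and the enumerate() loop in import_from_file(): the percentage is computed against a line-based total from count_lines(), so the generator has to emit exactly one item per input line, even for rows it skips.

    def parse(lines):
        for line in lines:
            if not line.strip():
                # Yield a placeholder instead of skipping, so the caller's
                # counter still advances once for every input line.
                yield None
                continue
            yield line.upper()

    lines = ["a", "", "b", "c"]
    total = len(lines)  # stands in for count_lines() on the gzip dump
    for i, item in enumerate(parse(lines), start=1):
        print(f"{100 * i / total:.0f}%")  # 25% 50% 75% 100%
        if item is None:
            continue  # the real loop skips None movies the same way

Without the placeholder yield, the loop above would top out at 75% for this input, which is the kind of drift the change below removes.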
--- unwind/imdb_import.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index 7e55b62..bccb18a 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -177,13 +177,14 @@ def read_ratings_as_mapping(path: Path): return {r[0]: (round(100 * (float(r[1]) - 1) / 9), int(r[2])) for r in rows} -def read_basics(path: Path): +def read_basics(path: Path) -> Generator[Movie | None, None, None]: mtime = gz_mtime(path) rows = read_imdb_tsv(path, BasicRow) for row in rows: if row.startYear is None: log.debug("Skipping movie, missing year: %s", row) + yield None continue m = row.as_movie() @@ -210,6 +211,9 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): log.info("⏳ Imported %s%%", round(perc, 1)) perc_next_report += perc_step + if m is None: + continue + if m.media_type not in { "Movie", "Short", @@ -234,6 +238,7 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): await add_or_update_many_movies(chunk) chunk = [] + log.info("👍 Imported 100%") await db.set_import_progress(100) From 7a7d619e64314a18c2b27c221df3e71b2df5a200 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 13:18:46 +0100 Subject: [PATCH 12/25] IMDb import: fix assertion We should only assert where we know the result, here the input file could just as well be empty. --- unwind/imdb_import.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index bccb18a..705db2f 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -198,7 +198,9 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): log.info("💾 Importing movies ...") total = count_lines(basics_path) - assert total != 0 + log.debug("Found %i movies.", total) + if total == 0: + raise RuntimeError(f"No movies found.") perc_next_report = 0.0 perc_step = 0.1 From cb7c66a8d195525e16a4cefcafd770d460afaa7e Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 14:56:09 +0100 Subject: [PATCH 13/25] add simple profiling script --- scripts/profile | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100755 scripts/profile diff --git a/scripts/profile b/scripts/profile new file mode 100755 index 0000000..ccbfc70 --- /dev/null +++ b/scripts/profile @@ -0,0 +1,13 @@ +#!/bin/sh -eu + +cd "$RUN_DIR" + +outfile="profile-$(date '+%Y%m%d-%H%M%S').txt" + +[ -z "${DEBUG:-}" ] || set -x + +echo "# Writing profiler stats to: $outfile" +python -m cProfile -o "$outfile" -m unwind "$@" + +echo "# Loading stats file: $outfile" +python -m pstats "$outfile" From 60d38e9b494c7560656d4aa57e93d0dcc6fa76ba Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 17:30:54 +0100 Subject: [PATCH 14/25] improve typing --- unwind/__main__.py | 2 +- unwind/imdb.py | 6 +++--- unwind/models.py | 2 +- unwind/request.py | 11 +++++++---- 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/unwind/__main__.py b/unwind/__main__.py index e1689a9..e802831 100644 --- a/unwind/__main__.py +++ b/unwind/__main__.py @@ -15,7 +15,7 @@ async def run_load_user_ratings_from_imdb(): await open_connection_pool() i = 0 - async for rating in refresh_user_ratings_from_imdb(): + async for _ in refresh_user_ratings_from_imdb(): i += 1 log.info("✨ Imported %s new ratings.", i) diff --git a/unwind/imdb.py b/unwind/imdb.py index 5b56b3f..6044d97 100644 --- a/unwind/imdb.py +++ b/unwind/imdb.py @@ -34,7 +34,7 @@ log = logging.getLogger(__name__) # p.text-muted.text-small span[name=nv] [data-value] 
-async def refresh_user_ratings_from_imdb(stop_on_dupe=True): +async def refresh_user_ratings_from_imdb(stop_on_dupe: bool = True): with session() as s: s.headers["Accept-Language"] = "en-US, en;q=0.5" @@ -149,7 +149,7 @@ def movie_and_rating_from_item(item) -> tuple[Movie, Rating]: ForgedRequest = namedtuple("ForgedRequest", "url headers") -async def parse_page(url) -> tuple[list[Rating], str | None]: +async def parse_page(url: str) -> tuple[list[Rating], str | None]: ratings = [] soup = soup_from_url(url) @@ -191,7 +191,7 @@ async def parse_page(url) -> tuple[list[Rating], str | None]: return (ratings, next_url if url != next_url else None) -async def load_ratings(user_id): +async def load_ratings(user_id: str): next_url = user_ratings_url(user_id) while next_url: diff --git a/unwind/models.py b/unwind/models.py index 030b87e..b34d6c3 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -19,7 +19,7 @@ from typing import ( from .types import ULID -JSON = Union[int, float, str, None, list["JSON"], dict[str, "JSON"]] +JSON = int | float | str | None | list["JSON"] | dict[str, "JSON"] JSONObject = dict[str, JSON] T = TypeVar("T") diff --git a/unwind/request.py b/unwind/request.py index 057cf7b..a4d1778 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -11,7 +11,7 @@ from hashlib import md5 from pathlib import Path from random import random from time import sleep, time -from typing import Callable, cast +from typing import Callable, ParamSpec, TypeVar, cast import bs4 import httpx @@ -31,6 +31,9 @@ _ASession_T = httpx.AsyncClient _Session_T = httpx.Client _Response_T = httpx.Response +_T = TypeVar("_T") +_P = ParamSpec("_P") + @contextmanager def session(): @@ -88,15 +91,15 @@ async def asession(): def _throttle( times: int, per_seconds: float, jitter: Callable[[], float] | None = None -) -> Callable[[Callable], Callable]: +) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]: calls: deque[float] = deque(maxlen=times) if jitter is None: jitter = lambda: 0.0 - def decorator(func: Callable) -> Callable: + def decorator(func: Callable[_P, _T]) -> Callable[_P, _T]: @wraps(func) - def inner(*args, **kwds): + def inner(*args: _P.args, **kwds: _P.kwargs): # clean up while calls: if calls[0] + per_seconds > time(): From 099770c80c147f94c8f2ad6395f8698174f4ea83 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 17:55:22 +0100 Subject: [PATCH 15/25] use async requests to refresh user ratings --- unwind/imdb.py | 6 ++--- unwind/request.py | 59 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+), 3 deletions(-) diff --git a/unwind/imdb.py b/unwind/imdb.py index 6044d97..477ec64 100644 --- a/unwind/imdb.py +++ b/unwind/imdb.py @@ -6,7 +6,7 @@ from urllib.parse import urljoin from . 
import db from .models import Movie, Rating, User -from .request import cache_path, session, soup_from_url +from .request import asession, asoup_from_url, cache_path log = logging.getLogger(__name__) @@ -35,7 +35,7 @@ log = logging.getLogger(__name__) async def refresh_user_ratings_from_imdb(stop_on_dupe: bool = True): - with session() as s: + async with asession() as s: s.headers["Accept-Language"] = "en-US, en;q=0.5" for user in await db.get_all(User): @@ -152,7 +152,7 @@ ForgedRequest = namedtuple("ForgedRequest", "url headers") async def parse_page(url: str) -> tuple[list[Rating], str | None]: ratings = [] - soup = soup_from_url(url) + soup = await asoup_from_url(url) meta = soup.find("meta", property="pageId") headline = soup.h1 diff --git a/unwind/request.py b/unwind/request.py index a4d1778..4579313 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -202,6 +202,56 @@ def _http_get(s: _Session_T, url: str, *args, **kwds) -> _Response_T: return resp +@_throttle(1, 1, random) +async def _ahttp_get(s: _ASession_T, url: str, *args, **kwds) -> _Response_T: + req = s.build_request(method="GET", url=url, *args, **kwds) + + cachefile = cache_path(req) if config.debug else None + + if cachefile: + if cachefile.exists(): + log.debug( + "💾 loading %s (%a) from cache %s ...", req.url, req.headers, cachefile + ) + with cachefile.open() as fp: + resp = _CachedResponse(**json.load(fp)) + if 300 <= resp.status_code <= 399: + raise _RedirectError( + from_url=resp.url, to_url=resp.headers["location"], is_cached=True + ) + return cast(_Response_T, resp) + + log.debug("⚡️ loading %s (%a) ...", req.url, req.headers) + resp = await s.send(req, follow_redirects=False, stream=True) + resp.raise_for_status() + + await resp.aread() # Download the response stream to allow `resp.text` access. + + if cachefile: + log.debug( + "💾 writing response to cache: %s (%a) -> %s", + req.url, + req.headers, + cachefile, + ) + with cachefile.open("w") as fp: + json.dump( + { + "status_code": resp.status_code, + "text": resp.text, + "url": str(resp.url), + "headers": dict(resp.headers), + }, + fp, + ) + + if resp.is_redirect: + # Redirects could mean trouble, we need to stay on top of that! 
+ raise _RedirectError(from_url=str(resp.url), to_url=resp.headers["location"]) + + return resp + + def soup_from_url(url): """Return a BeautifulSoup instance from the contents for the given URL.""" with session() as s: @@ -211,6 +261,15 @@ def soup_from_url(url): return soup +async def asoup_from_url(url): + """Return a BeautifulSoup instance from the contents for the given URL.""" + async with asession() as s: + r = await _ahttp_get(s, url) + + soup = bs4.BeautifulSoup(r.text, "html5lib") + return soup + + def _last_modified_from_response(resp: _Response_T) -> float | None: if last_mod := resp.headers.get("last-modified"): try: From 324eb8e9b7f27d6057d87ba899df2e3fcc18d1b0 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 18:12:50 +0100 Subject: [PATCH 16/25] fix tests for Pytest-Asyncio running in strict mode --- tests/conftest.py | 6 ++++-- tests/test_db.py | 8 ++++---- tests/test_imdb.py | 4 ++-- tests/test_web.py | 6 ++---- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0fd79ea..e57d3e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,8 @@ import asyncio import pytest +import pytest_asyncio + from unwind import db @@ -13,7 +15,7 @@ def event_loop(): loop.close() -@pytest.fixture(scope="session") +@pytest_asyncio.fixture(scope="session") async def shared_conn(): c = db.shared_connection() await c.connect() @@ -24,7 +26,7 @@ async def shared_conn(): await c.disconnect() -@pytest.fixture +@pytest_asyncio.fixture async def conn(shared_conn): async with shared_conn.transaction(force_rollback=True): yield shared_conn diff --git a/tests/test_db.py b/tests/test_db.py index caeaf69..13a7de4 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -3,10 +3,9 @@ import pytest from unwind import db, models, web_models -pytestmark = pytest.mark.asyncio - -async def test_add_and_get(shared_conn): +@pytest.mark.asyncio +async def test_add_and_get(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): m1 = models.Movie( @@ -31,7 +30,8 @@ async def test_add_and_get(shared_conn): assert m2 == await db.get(models.Movie, id=str(m2.id)) -async def test_find_ratings(shared_conn): +@pytest.mark.asyncio +async def test_find_ratings(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): m1 = models.Movie( diff --git a/tests/test_imdb.py b/tests/test_imdb.py index 13a03fd..48308a7 100644 --- a/tests/test_imdb.py +++ b/tests/test_imdb.py @@ -3,12 +3,12 @@ from unwind.imdb import imdb_rating_from_score, score_from_imdb_rating @pytest.mark.parametrize("rating", (x / 10 for x in range(10, 101))) -def test_rating_conversion(rating): +def test_rating_conversion(rating: float): assert rating == imdb_rating_from_score(score_from_imdb_rating(rating)) @pytest.mark.parametrize("score", range(0, 101)) -def test_score_conversion(score): +def test_score_conversion(score: int): # Because our score covers 101 discrete values and IMDb's rating only 91 # discrete values, the mapping is non-injective, i.e. 10 values can't be # mapped uniquely. 
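The pigeonhole arithmetic behind that comment can be checked in isolation. Assuming score_from_imdb_rating() follows the same formula as read_ratings_as_mapping() shown earlier in the series, score = round(100 * (rating - 1) / 9), this standalone snippet lists the scores that no rating maps onto:

    # Scores reachable from the 91 possible ratings 1.0, 1.1, ..., 10.0.
    reachable = {round(100 * (k / 10 - 1) / 9) for k in range(10, 101)}
    unreachable = sorted(set(range(101)) - reachable)
    print(unreachable)  # [5, 15, 25, 35, 45, 55, 65, 75, 85, 95]

Ten of the 101 scores can never come back out of a score -> rating -> score round trip, which is presumably why test_score_conversion() cannot insist on exact equality the way test_rating_conversion() does.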
diff --git a/tests/test_web.py b/tests/test_web.py index 55c2d23..250447d 100644 --- a/tests/test_web.py +++ b/tests/test_web.py @@ -4,13 +4,11 @@ import pytest from unwind import create_app from unwind import db, models, imdb -# https://pypi.org/project/pytest-asyncio/ -pytestmark = pytest.mark.asyncio - app = create_app() -async def test_app(shared_conn): +@pytest.mark.asyncio +async def test_app(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): # https://www.starlette.io/testclient/ From 65ae10c500da9771e247c6181c1f7d8b9b35547d Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 18:14:16 +0100 Subject: [PATCH 17/25] include test files in linting --- scripts/lint-py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lint-py b/scripts/lint-py index b2040d7..54cb0f2 100755 --- a/scripts/lint-py +++ b/scripts/lint-py @@ -4,7 +4,7 @@ cd "$RUN_DIR" [ -z "${DEBUG:-}" ] || set -x -autoflake --quiet --check --recursive unwind -isort unwind -black unwind +autoflake --quiet --check --recursive unwind tests +isort unwind tests +black unwind tests pyright From 9ffcc5357150cecde26f5e6f8fccceaf92411efb Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 18:15:14 +0100 Subject: [PATCH 18/25] apply auto-formatting to tests --- tests/test_db.py | 4 +--- tests/test_imdb.py | 1 + tests/test_web.py | 6 ++---- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/test_db.py b/tests/test_db.py index 13a7de4..ac8e64b 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -1,4 +1,5 @@ from datetime import datetime + import pytest from unwind import db, models, web_models @@ -7,7 +8,6 @@ from unwind import db, models, web_models @pytest.mark.asyncio async def test_add_and_get(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): - m1 = models.Movie( title="test movie", release_year=2013, @@ -33,7 +33,6 @@ async def test_add_and_get(shared_conn: db.Database): @pytest.mark.asyncio async def test_find_ratings(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): - m1 = models.Movie( title="test movie", release_year=2013, @@ -157,4 +156,3 @@ async def test_find_ratings(shared_conn: db.Database): rows = await db.find_ratings(title="test", include_unrated=True) ratings = tuple(web_models.Rating(**r) for r in rows) assert (web_models.Rating.from_movie(m1),) == ratings - diff --git a/tests/test_imdb.py b/tests/test_imdb.py index 48308a7..00467ce 100644 --- a/tests/test_imdb.py +++ b/tests/test_imdb.py @@ -1,4 +1,5 @@ import pytest + from unwind.imdb import imdb_rating_from_score, score_from_imdb_rating diff --git a/tests/test_web.py b/tests/test_web.py index 250447d..358c2a2 100644 --- a/tests/test_web.py +++ b/tests/test_web.py @@ -1,8 +1,7 @@ -from starlette.testclient import TestClient import pytest +from starlette.testclient import TestClient -from unwind import create_app -from unwind import db, models, imdb +from unwind import create_app, db, imdb, models app = create_app() @@ -10,7 +9,6 @@ app = create_app() @pytest.mark.asyncio async def test_app(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): - # https://www.starlette.io/testclient/ client = TestClient(app) response = client.get("/api/v1/movies") From b408fee1bc6f2716a131d697bdd5fe327f78b3c8 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 18:16:43 +0100 Subject: [PATCH 19/25] add auto-formatting of tests to .git-blame-ignore-revs --- .git-blame-ignore-revs | 2 
++ 1 file changed, 2 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 3104b77..d64e091 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,2 +1,4 @@ # Apply Black v23.1.0 formatting changes. 8a8bfce89de23d987386a35b659532bbac373788 +# Apply auto-formatting to tests. +9ffcc5357150cecde26f5e6f8fccceaf92411efb From 9fb24741a1c2851551af5361202d7780a7b9c33e Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 4 Feb 2023 18:17:13 +0100 Subject: [PATCH 20/25] remove unused sync request functions --- unwind/request.py | 193 +--------------------------------------------- 1 file changed, 1 insertion(+), 192 deletions(-) diff --git a/unwind/request.py b/unwind/request.py index 4579313..4e57564 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -4,7 +4,7 @@ import logging import os import tempfile from collections import deque -from contextlib import asynccontextmanager, contextmanager +from contextlib import asynccontextmanager from dataclasses import dataclass, field from functools import wraps from hashlib import md5 @@ -25,44 +25,14 @@ if config.debug and config.cachedir: _shared_asession = None -_shared_session = None _ASession_T = httpx.AsyncClient -_Session_T = httpx.Client _Response_T = httpx.Response _T = TypeVar("_T") _P = ParamSpec("_P") -@contextmanager -def session(): - """Return the shared request session. - - The session is shared by all request functions and provides cookie - persistence and connection pooling. - Opening the session before making a request allows you to set headers - or change the retry behavior. - """ - global _shared_session - - if _shared_session: - yield _shared_session - return - - _shared_session = _Session() - try: - yield _shared_session - finally: - _shared_session = None - - -def _Session() -> _Session_T: - s = _Session_T() - s.headers["user-agent"] = "Mozilla/5.0 Gecko/20100101 unwind/20230203" - return s - - @asynccontextmanager async def asession(): """Return the shared request session. @@ -158,50 +128,6 @@ def cache_path(req) -> Path | None: return config.cachedir / md5(sig.encode()).hexdigest() -@_throttle(1, 1, random) -def _http_get(s: _Session_T, url: str, *args, **kwds) -> _Response_T: - req = s.build_request(method="GET", url=url, *args, **kwds) - - cachefile = cache_path(req) if config.debug else None - - if cachefile: - if cachefile.exists(): - log.debug( - f"💾 loading {req.url} ({req.headers!a}) from cache {cachefile} ..." - ) - with cachefile.open() as fp: - resp = _CachedResponse(**json.load(fp)) - if 300 <= resp.status_code <= 399: - raise _RedirectError( - from_url=resp.url, to_url=resp.headers["location"], is_cached=True - ) - return cast(_Response_T, resp) - - log.debug(f"⚡️ loading {req.url} ({req.headers!a}) ...") - resp = s.send(req, follow_redirects=False, stream=True) - resp.raise_for_status() - - resp.read() # Download the response stream to allow `resp.text` access. - - if cachefile: - with cachefile.open("w") as fp: - json.dump( - { - "status_code": resp.status_code, - "text": resp.text, - "url": resp.url, - "headers": dict(resp.headers), - }, - fp, - ) - - if resp.is_redirect: - # Redirects could mean trouble, we need to stay on top of that! 
- raise _RedirectError(from_url=str(resp.url), to_url=resp.headers["location"]) - - return resp - - @_throttle(1, 1, random) async def _ahttp_get(s: _ASession_T, url: str, *args, **kwds) -> _Response_T: req = s.build_request(method="GET", url=url, *args, **kwds) @@ -252,15 +178,6 @@ async def _ahttp_get(s: _ASession_T, url: str, *args, **kwds) -> _Response_T: return resp -def soup_from_url(url): - """Return a BeautifulSoup instance from the contents for the given URL.""" - with session() as s: - r = _http_get(s, url) - - soup = bs4.BeautifulSoup(r.text, "html5lib") - return soup - - async def asoup_from_url(url): """Return a BeautifulSoup instance from the contents for the given URL.""" async with asession() as s: @@ -282,114 +199,6 @@ def _last_modified_from_file(path: Path) -> float: return path.stat().st_mtime -def download( - url: str, - file_path: Path | str | None = None, - *, - replace_existing: bool | None = None, - only_if_newer: bool = False, - timeout: float | None = None, - chunk_callback=None, - response_callback=None, -) -> bytes | None: - """Download a file. - - If `file_path` is `None` return the remote content, otherwise write the - content to the given file path. - Existing files will not be overwritten unless `replace_existing` is set. - Setting `only_if_newer` will check if the remote file is newer than the - local file, otherwise the download will be aborted. - """ - if replace_existing is None: - replace_existing = only_if_newer - - file_exists = None - if file_path is not None: - file_path = Path(file_path) - - file_exists = file_path.exists() and file_path.stat().st_size - if file_exists and not replace_existing: - raise FileExistsError(23, "Would replace existing file", str(file_path)) - - with session() as s: - headers = {} - if file_exists and only_if_newer: - assert file_path - file_lastmod = _last_modified_from_file(file_path) - headers["if-modified-since"] = email.utils.formatdate( - file_lastmod, usegmt=True - ) - - req = s.build_request(method="GET", url=url, headers=headers, timeout=timeout) - - log.debug("⚡️ loading %s (%s) ...", req.url, req.headers) - resp = s.send(req, follow_redirects=True, stream=True) - - if response_callback is not None: - try: - response_callback(resp) - except: - log.exception("🐛 Error in response callback.") - - log.debug("☕️ Response status: %s; headers: %s", resp.status_code, resp.headers) - - if resp.status_code == httpx.codes.NOT_MODIFIED: - log.debug("✋ Remote file has not changed, skipping download.") - return - - resp.raise_for_status() - - if file_path is None: - resp.read() # Download the response stream to allow `resp.content` access. - return resp.content - - assert replace_existing is True - - resp_lastmod = _last_modified_from_response(resp) - - # Check Last-Modified in case the server ignored If-Modified-Since. - # XXX also check Content-Length? - if file_exists and only_if_newer and resp_lastmod is not None: - assert file_lastmod - - if resp_lastmod <= file_lastmod: - log.debug("✋ Local file is newer, skipping download.") - resp.close() - return - - # Create intermediate directories if necessary. - download_dir = file_path.parent - download_dir.mkdir(parents=True, exist_ok=True) - - # Write content to temp file. - tempdir = download_dir - tempfd, tempfile_path = tempfile.mkstemp( - dir=tempdir, prefix=f".download-{file_path.name}." 
- ) - one_mb = 2**20 - chunk_size = 8 * one_mb - try: - log.debug("💾 Writing to temp file %s ...", tempfile_path) - for chunk in resp.iter_bytes(chunk_size): - os.write(tempfd, chunk) - if chunk_callback: - try: - chunk_callback(chunk) - except: - log.exception("🐛 Error in chunk callback.") - finally: - os.close(tempfd) - - # Move downloaded file to destination. - if file_exists: - log.debug("💾 Replacing existing file: %s", file_path) - Path(tempfile_path).replace(file_path) - - # Fix file attributes. - if resp_lastmod is not None: - os.utime(file_path, (resp_lastmod, resp_lastmod)) - - async def adownload( url: str, *, From c2a9cfecf1073d9fcf5a27924756f353f9a66407 Mon Sep 17 00:00:00 2001 From: ducklet Date: Fri, 17 Mar 2023 23:17:28 +0100 Subject: [PATCH 21/25] make listener port in Dockerfile configurable --- Dockerfile | 5 ++++- scripts/server | 9 ++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index cef1ba6..7be9b6c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,10 @@ USER 10000:10001 COPY . ./ ENV UNWIND_DATA="/data" -VOLUME ["/data"] +VOLUME $UNWIND_DATA + +ENV UNWIND_PORT=8097 +EXPOSE $UNWIND_PORT ENTRYPOINT ["/var/app/run"] CMD ["server"] diff --git a/scripts/server b/scripts/server index 5440717..599cb7f 100755 --- a/scripts/server +++ b/scripts/server @@ -1,7 +1,14 @@ #!/bin/sh -eu +: "${UNWIND_PORT:=8097}" + cd "$RUN_DIR" [ -z "${DEBUG:-}" ] || set -x -exec uvicorn --host 0.0.0.0 --factory unwind:create_app +export UNWIND_PORT + +exec uvicorn \ + --host 0.0.0.0 \ + --port "$UNWIND_PORT" \ + --factory unwind:create_app From 5efa3ef2c26e9dd8d4218216b7757ef0216675d8 Mon Sep 17 00:00:00 2001 From: ducklet Date: Fri, 17 Mar 2023 23:34:47 +0100 Subject: [PATCH 22/25] run Poetry update --- poetry.lock | 80 +++++++++++++++++++++++++------------------------- pyproject.toml | 6 ++-- 2 files changed, 43 insertions(+), 43 deletions(-) diff --git a/poetry.lock b/poetry.lock index a5184f2..8ff7d02 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. [[package]] name = "aiosqlite" @@ -54,14 +54,14 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy [[package]] name = "autoflake" -version = "2.0.1" +version = "2.0.2" description = "Removes unused imports and unused variables" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "autoflake-2.0.1-py3-none-any.whl", hash = "sha256:143b0843667734af53532c443e950c787316b9b1155b2273558260b44836e8e4"}, - {file = "autoflake-2.0.1.tar.gz", hash = "sha256:1ce520131b7f396915242fe91e57221f4d42408529bbe3ae93adafed286591e0"}, + {file = "autoflake-2.0.2-py3-none-any.whl", hash = "sha256:a82d8efdcbbb7129a8a23238c529fb9d9919c562e26bb7963ea6890fbfff7d02"}, + {file = "autoflake-2.0.2.tar.gz", hash = "sha256:e0164421ff13f805f08a023e249d84200bd00463d213b490906bfefa67e83830"}, ] [package.dependencies] @@ -397,14 +397,14 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] @@ -436,31 +436,31 @@ files = [ [[package]] name = "pathspec" -version = "0.11.0" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, - {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] [[package]] name = "platformdirs" -version = "2.6.2" +version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -482,7 +482,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pyflakes" version = "3.0.1" description = "passive checker of Python programs" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -492,14 +492,14 @@ files = [ [[package]] name = "pyright" -version = "1.1.292" +version = "1.1.299" description = "Command line wrapper for pyright" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.292-py3-none-any.whl", hash = "sha256:23d1f14b15afe38bb7a7117b9861ad0546aff078da312d294e60a727445c23ff"}, - {file = "pyright-1.1.292.tar.gz", hash = "sha256:035ea1af6fabfdcc80c0afb545f677bd377114157d69779cce2a642ff894e51c"}, + {file = "pyright-1.1.299-py3-none-any.whl", hash = "sha256:f34dfd0c2fcade34f9878b1fc69cb9456476dc78227e0a2fa046107ec55c0235"}, + {file = "pyright-1.1.299.tar.gz", hash = 
"sha256:b3a9a6affa1252c52793e8663ade59ff966f8495ecfad6328deffe59cfc5a9a9"}, ] [package.dependencies] @@ -511,14 +511,14 @@ dev = ["twine (>=3.4.1)"] [[package]] name = "pytest" -version = "7.2.1" +version = "7.2.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, - {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, + {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, + {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, ] [package.dependencies] @@ -570,14 +570,14 @@ idna2008 = ["idna"] [[package]] name = "setuptools" -version = "67.1.0" +version = "67.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.1.0-py3-none-any.whl", hash = "sha256:a7687c12b444eaac951ea87a9627c4f904ac757e7abdc5aac32833234af90378"}, - {file = "setuptools-67.1.0.tar.gz", hash = "sha256:e261cdf010c11a41cb5cb5f1bf3338a7433832029f559a6a7614bd42a967c300"}, + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, ] [package.extras] @@ -611,14 +611,14 @@ files = [ [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, ] [[package]] @@ -673,7 +673,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -698,14 +698,14 @@ sqlcipher = ["sqlcipher3-binary"] [[package]] name = "starlette" -version = "0.23.1" +version = "0.26.1" description = "The little ASGI library that shines." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "starlette-0.23.1-py3-none-any.whl", hash = "sha256:ec69736c90be8dbfc6ec6800ba6feb79c8c44f9b1706c0b2bb27f936bcf362cc"}, - {file = "starlette-0.23.1.tar.gz", hash = "sha256:8510e5b3d670326326c5c1d4cb657cc66832193fe5d5b7015a51c7b1e1b1bf42"}, + {file = "starlette-0.26.1-py3-none-any.whl", hash = "sha256:e87fce5d7cbdde34b76f0ac69013fd9d190d581d80681493016666e6f96c6d5e"}, + {file = "starlette-0.26.1.tar.gz", hash = "sha256:41da799057ea8620e4667a3e69a5b1923ebd32b1819c8fa75634bbe8d8bea9bd"}, ] [package.dependencies] @@ -728,14 +728,14 @@ files = [ [[package]] name = "uvicorn" -version = "0.20.0" +version = "0.21.1" description = "The lightning-fast ASGI server." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "uvicorn-0.20.0-py3-none-any.whl", hash = "sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd"}, - {file = "uvicorn-0.20.0.tar.gz", hash = "sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8"}, + {file = "uvicorn-0.21.1-py3-none-any.whl", hash = "sha256:e47cac98a6da10cd41e6fd036d472c6f58ede6c5dbee3dbee3ef7a100ed97742"}, + {file = "uvicorn-0.21.1.tar.gz", hash = "sha256:0fac9cb342ba099e0d582966005f3fdba5b0290579fed4a6266dc702ca7bb032"}, ] [package.dependencies] @@ -760,4 +760,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "05dc90985d69552657feca4adf2827f52a8e156eb77eda65cf6775357bed5472" +content-hash = "a43dcab0548fc3be276e10ff19fe108211e5bdc42a8a161c744eeb4d20b14294" diff --git a/pyproject.toml b/pyproject.toml index f357688..08c45e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,17 +9,17 @@ license = "LOL" python = "^3.11" beautifulsoup4 = "^4.9.3" html5lib = "^1.1" -starlette = "^0.23.1" +starlette = "^0.26" ulid-py = "^1.1.0" databases = {extras = ["sqlite"], version = "^0.7.0"} -uvicorn = "^0.20.0" -autoflake = "^2.0.1" +uvicorn = "^0.21" httpx = "^0.23.3" [tool.poetry.group.dev] optional = true [tool.poetry.group.dev.dependencies] +autoflake = "*" pytest = "*" pyright = "*" black = "*" From eb76ab18673fce6b9f13a8c732ae72c81c26ded2 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 18 Mar 2023 00:09:58 +0100 Subject: [PATCH 23/25] fix using deprecated Starlette feature `on_startup`/`on_shutdown` will be removed in v1.0. 
--- unwind/web.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/unwind/web.py b/unwind/web.py index cab9a9e..eb08e9c 100644 --- a/unwind/web.py +++ b/unwind/web.py @@ -1,4 +1,5 @@ import asyncio +import contextlib import logging import secrets from json.decoder import JSONDecodeError @@ -612,6 +613,13 @@ def auth_error(request, err): return unauthorized(str(err)) +@contextlib.asynccontextmanager +async def lifespan(app: Starlette): + await open_connection_pool() + yield + await close_connection_pool() + + def create_app(): if config.loglevel == "DEBUG": logging.basicConfig( @@ -622,8 +630,7 @@ def create_app(): log.debug(f"Log level: {config.loglevel}") return Starlette( - on_startup=[open_connection_pool], - on_shutdown=[close_connection_pool], + lifespan=lifespan, routes=[ Mount(f"{config.api_base}v1", routes=route.registered), ], From 496c51402af20370c67881a6c920848eb58d63af Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 18 Mar 2023 00:29:29 +0100 Subject: [PATCH 24/25] fix using deprecated SQLAlchemy mapping access --- scripts/tests | 1 + unwind/db.py | 16 ++++++++-------- unwind/models.py | 3 ++- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/scripts/tests b/scripts/tests index 4237558..df8b5a0 100755 --- a/scripts/tests +++ b/scripts/tests @@ -10,5 +10,6 @@ trap 'rm "$dbfile"' EXIT TERM INT QUIT [ -z "${DEBUG:-}" ] || set -x +SQLALCHEMY_WARN_20=1 \ UNWIND_STORAGE="$dbfile" \ python -m pytest "$@" diff --git a/unwind/db.py b/unwind/db.py index 4a4fc68..968111e 100644 --- a/unwind/db.py +++ b/unwind/db.py @@ -263,7 +263,7 @@ async def get( query += f" ORDER BY {order_by}" async with locked_connection() as conn: row = await conn.fetch_one(query=query, values=values) - return fromplain(model, row, serialized=True) if row else None + return fromplain(model, row._mapping, serialized=True) if row else None async def get_many(model: Type[ModelType], **kwds) -> Iterable[ModelType]: @@ -283,7 +283,7 @@ async def get_many(model: Type[ModelType], **kwds) -> Iterable[ModelType]: query = f"SELECT {fields_} FROM {model._table} WHERE {cond}" async with locked_connection() as conn: rows = await conn.fetch_all(query=query, values=values) - return (fromplain(model, row, serialized=True) for row in rows) + return (fromplain(model, row._mapping, serialized=True) for row in rows) async def get_all(model: Type[ModelType], **kwds) -> Iterable[ModelType]: @@ -294,7 +294,7 @@ async def get_all(model: Type[ModelType], **kwds) -> Iterable[ModelType]: query = f"SELECT {fields_} FROM {model._table} WHERE {cond}" async with locked_connection() as conn: rows = await conn.fetch_all(query=query, values=values) - return (fromplain(model, row, serialized=True) for row in rows) + return (fromplain(model, row._mapping, serialized=True) for row in rows) async def update(item): @@ -467,7 +467,7 @@ async def find_ratings( """ async with locked_connection() as conn: rows = await conn.fetch_all(bindparams(query, values)) - movie_ids = tuple(r["movie_id"] for r in rows) + movie_ids = tuple(r._mapping["movie_id"] for r in rows) if include_unrated and len(movie_ids) < limit_rows: sqlin, sqlin_vals = sql_in("id", movie_ids, not_=True) @@ -486,7 +486,7 @@ async def find_ratings( {**values, **sqlin_vals, "limit_rows": limit_rows - len(movie_ids)}, ) ) - movie_ids += tuple(r["movie_id"] for r in rows) + movie_ids += tuple(r._mapping["movie_id"] for r in rows) return await ratings_for_movie_ids(ids=movie_ids) @@ -528,7 +528,7 @@ async def ratings_for_movie_ids( async with 
locked_connection() as conn: rows = await conn.fetch_all(bindparams(query, vals)) - return tuple(dict(r) for r in rows) + return tuple(dict(r._mapping) for r in rows) def sql_fields(tp: Type): @@ -584,7 +584,7 @@ async def ratings_for_movies( async with locked_connection() as conn: rows = await conn.fetch_all(query, values) - return (fromplain(Rating, row, serialized=True) for row in rows) + return (fromplain(Rating, row._mapping, serialized=True) for row in rows) async def find_movies( @@ -651,7 +651,7 @@ async def find_movies( async with locked_connection() as conn: rows = await conn.fetch_all(bindparams(query, values)) - movies = [fromplain(Movie, row, serialized=True) for row in rows] + movies = [fromplain(Movie, row._mapping, serialized=True) for row in rows] if not user_ids: return ((m, []) for m in movies) diff --git a/unwind/models.py b/unwind/models.py index b34d6c3..4480307 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -10,6 +10,7 @@ from typing import ( ClassVar, Container, Literal, + Mapping, Type, TypeVar, Union, @@ -144,7 +145,7 @@ def asplain( return d -def fromplain(cls: Type[T], d: dict[str, Any], *, serialized: bool = False) -> T: +def fromplain(cls: Type[T], d: Mapping, *, serialized: bool = False) -> T: """Return an instance of the given model using the given data. If `serialized` is `True`, collection types (lists, dicts, etc.) will be From 15e6e491024512453b73e2c4649b96d8689be2a3 Mon Sep 17 00:00:00 2001 From: ducklet Date: Sat, 18 Mar 2023 00:29:41 +0100 Subject: [PATCH 25/25] remove unused functions --- unwind/db.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/unwind/db.py b/unwind/db.py index 968111e..c07b3a9 100644 --- a/unwind/db.py +++ b/unwind/db.py @@ -535,22 +535,6 @@ def sql_fields(tp: Type): return (f"{tp._table}.{f.name}" for f in fields(tp)) -def sql_fieldmap(tp: Type): - """-> {alias: (table, field_name)}""" - return {f"{tp._table}_{f.name}": (tp._table, f.name) for f in fields(tp)} - - -def mux(*tps: Type): - return ", ".join( - f"{t}.{n} AS {k}" for tp in tps for k, (t, n) in sql_fieldmap(tp).items() - ) - - -def demux(tp: Type[ModelType], row) -> ModelType: - d = {n: row[k] for k, (_, n) in sql_fieldmap(tp).items()} - return fromplain(tp, d, serialized=True) - - def sql_in(column: str, values: Iterable[T], not_=False) -> tuple[str, dict[str, T]]: c = column.replace(".", "___") value_map = {f"{c}_{i}": v for i, v in enumerate(values, start=1)}