diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..d64e091 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,4 @@ +# Apply Black v23.1.0 formatting changes. +8a8bfce89de23d987386a35b659532bbac373788 +# Apply auto-formatting to tests. +9ffcc5357150cecde26f5e6f8fccceaf92411efb diff --git a/Dockerfile b/Dockerfile index 7522a11..7be9b6c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.10-alpine +FROM docker.io/library/python:3.11-alpine RUN apk update --no-cache \ && apk upgrade --no-cache \ @@ -11,20 +11,18 @@ WORKDIR /var/app COPY requirements.txt ./ -# Required to build greenlet on Alpine, dependency of SQLAlchemy 1.4. -RUN apk add --no-cache \ - --virtual .build-deps \ - g++ gcc musl-dev \ - && pip install --no-cache-dir --upgrade \ - --requirement requirements.txt \ - && apk del .build-deps +RUN pip install --no-cache-dir --upgrade \ + --requirement requirements.txt USER 10000:10001 COPY . ./ ENV UNWIND_DATA="/data" -VOLUME ["/data"] +VOLUME $UNWIND_DATA + +ENV UNWIND_PORT=8097 +EXPOSE $UNWIND_PORT ENTRYPOINT ["/var/app/run"] CMD ["server"] diff --git a/poetry.lock b/poetry.lock index 9405979..8ff7d02 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,13 +1,16 @@ +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. 
+ [[package]] name = "aiosqlite" -version = "0.17.0" +version = "0.18.0" description = "asyncio bridge to the standard sqlite3 module" category = "main" optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing_extensions = ">=3.7.2" +python-versions = ">=3.7" +files = [ + {file = "aiosqlite-0.18.0-py3-none-any.whl", hash = "sha256:c3511b841e3a2c5614900ba1d179f366826857586f78abd75e7cbeb88e75a557"}, + {file = "aiosqlite-0.18.0.tar.gz", hash = "sha256:faa843ef5fb08bafe9a9b3859012d3d9d6f77ce3637899de20606b7fc39aa213"}, +] [[package]] name = "anyio" @@ -16,6 +19,10 @@ description = "High level compatibility layer for multiple asynchronous event lo category = "main" optional = false python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] [package.dependencies] idna = ">=2.8" @@ -28,25 +35,49 @@ trio = ["trio (>=0.16,<0.22)"] [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
"zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "autoflake" +version = "2.0.2" +description = "Removes unused imports and unused variables" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "autoflake-2.0.2-py3-none-any.whl", hash = "sha256:a82d8efdcbbb7129a8a23238c529fb9d9919c562e26bb7963ea6890fbfff7d02"}, + {file = "autoflake-2.0.2.tar.gz", hash = "sha256:e0164421ff13f805f08a023e249d84200bd00463d213b490906bfefa67e83830"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.11.2" description = "Screen-scraping library" category = "main" optional = false python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, + {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, +] [package.dependencies] soupsieve = ">1.2" @@ -57,18 +88,45 @@ lxml = ["lxml"] [[package]] name = "black" -version = "22.10.0" +version = "23.1.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + 
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, +] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -78,22 +136,15 @@ uvloop = 
["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode_backport = ["unicodedata2"] +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] [[package]] name = "click" @@ -102,29 +153,41 @@ description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "databases" -version = "0.6.1" +version = "0.7.0" description = "Async database support for Python." category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "databases-0.7.0-py3-none-any.whl", hash = "sha256:cf5da4b8a3e3cd038c459529725ebb64931cbbb7a091102664f20ef8f6cefd0d"}, + {file = "databases-0.7.0.tar.gz", hash = "sha256:ea2d419d3d2eb80595b7ceb8f282056f080af62efe2fb9bcd83562f93ec4b674"}, +] [package.dependencies] aiosqlite = {version = "*", optional = true, markers = "extra == \"sqlite\""} -sqlalchemy = ">=1.4,<1.5" +sqlalchemy = ">=1.4.42,<1.5" [package.extras] aiomysql = ["aiomysql"] @@ -138,14 +201,77 @@ sqlite = ["aiosqlite"] [[package]] name = "greenlet" -version = "1.1.2" +version = "2.0.2" description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = 
"greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = 
"greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] [[package]] name = "h11" @@ -154,6 +280,10 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] [[package]] name = "html5lib" @@ -162,6 +292,10 @@ description = "HTML parser based on the WHATWG HTML specification" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] [package.dependencies] six = ">=1.9" @@ -173,6 +307,52 @@ chardet = ["chardet (>=2.2)"] genshi = ["genshi"] lxml = ["lxml"] +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "idna" version = "3.4" @@ -180,36 +360,52 @@ description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" +python-versions = 
">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] name = "isort" -version = "5.10.1" +version = "5.12.0" description = "A Python utility / library to sort Python imports." category = "dev" optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] -requirements_deprecated_finder = ["pip-api", "pipreqs"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] [[package]] name = "nodeenv" @@ -218,40 +414,53 @@ description = "Node.js virtual environment builder" category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] [package.dependencies] setuptools = "*" [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" +files = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] [[package]] name = "pathspec" -version = "0.10.1" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, +] [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "3.1.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, +] [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -260,37 +469,38 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "py" -version = "1.11.0" -description = "library 
with cross-python path, ini-parsing, io, code, log facilities" +name = "pyflakes" +version = "3.0.1" +description = "passive checker of Python programs" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +python-versions = ">=3.6" +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] [[package]] name = "pyright" -version = "1.1.276" +version = "1.1.299" description = "Command line wrapper for pyright" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.299-py3-none-any.whl", hash = "sha256:f34dfd0c2fcade34f9878b1fc69cb9456476dc78227e0a2fa046107ec55c0235"}, + {file = "pyright-1.1.299.tar.gz", hash = "sha256:b3a9a6affa1252c52793e8663ade59ff966f8495ecfad6328deffe59cfc5a9a9"}, +] [package.dependencies] nodeenv = ">=1.6.0" @@ -301,11 +511,15 @@ dev = ["twine (>=3.4.1)"] [[package]] name = "pytest" -version = "7.1.3" +version = "7.2.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, + {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -313,55 +527,62 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -py = 
">=1.8.2" -tomli = ">=1.0.0" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.20.1" +version = "0.20.3" description = "Pytest support for asyncio" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.20.3.tar.gz", hash = "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, + {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, +] [package.dependencies] pytest = ">=6.1.0" [package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] [package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +idna2008 = ["idna"] [[package]] name = "setuptools" -version = "65.5.0" +version = "67.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "setuptools-67.6.0-py3-none-any.whl", hash = 
"sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, +] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -371,6 +592,10 @@ description = "Python 2 and 3 compatibility utilities" 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "sniffio" @@ -379,84 +604,115 @@ description = "Sniff out which async library your code is running under" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, +] [[package]] -name = "SQLAlchemy" -version = "1.4.25" +name = "sqlalchemy" +version = "1.4.46" description = "Database Abstraction Library" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.46-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:7001f16a9a8e06488c3c7154827c48455d1c1507d7228d43e781afbc8ceccf6d"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c7a46639ba058d320c9f53a81db38119a74b8a7a1884df44d09fbe807d028aaf"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27m-win32.whl", hash = "sha256:c04144a24103135ea0315d459431ac196fe96f55d3213bfd6d39d0247775c854"}, + {file 
= "SQLAlchemy-1.4.46-cp27-cp27m-win_amd64.whl", hash = "sha256:7b81b1030c42b003fc10ddd17825571603117f848814a344d305262d370e7c34"}, + {file = "SQLAlchemy-1.4.46-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:939f9a018d2ad04036746e15d119c0428b1e557470361aa798e6e7d7f5875be0"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b7f4b6aa6e87991ec7ce0e769689a977776db6704947e562102431474799a857"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbf17ac9a61e7a3f1c7ca47237aac93cabd7f08ad92ac5b96d6f8dea4287fc1"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7f8267682eb41a0584cf66d8a697fef64b53281d01c93a503e1344197f2e01fe"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cb0ad8a190bc22d2112001cfecdec45baffdf41871de777239da6a28ed74b6"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-win32.whl", hash = "sha256:5f752676fc126edc1c4af0ec2e4d2adca48ddfae5de46bb40adbd3f903eb2120"}, + {file = "SQLAlchemy-1.4.46-cp310-cp310-win_amd64.whl", hash = "sha256:31de1e2c45e67a5ec1ecca6ec26aefc299dd5151e355eb5199cd9516b57340be"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d68e1762997bfebf9e5cf2a9fd0bcf9ca2fdd8136ce7b24bbd3bbfa4328f3e4a"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d112b0f3c1bc5ff70554a97344625ef621c1bfe02a73c5d97cac91f8cd7a41e"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69fac0a7054d86b997af12dc23f581cf0b25fb1c7d1fed43257dee3af32d3d6d"}, + {file = "SQLAlchemy-1.4.46-cp311-cp311-win32.whl", hash = "sha256:887865924c3d6e9a473dc82b70977395301533b3030d0f020c38fd9eba5419f2"}, 
+ {file = "SQLAlchemy-1.4.46-cp311-cp311-win_amd64.whl", hash = "sha256:984ee13543a346324319a1fb72b698e521506f6f22dc37d7752a329e9cd00a32"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:9167d4227b56591a4cc5524f1b79ccd7ea994f36e4c648ab42ca995d28ebbb96"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d61e9ecc849d8d44d7f80894ecff4abe347136e9d926560b818f6243409f3c86"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3ec187acf85984263299a3f15c34a6c0671f83565d86d10f43ace49881a82718"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9883f5fae4fd8e3f875adc2add69f8b945625811689a6c65866a35ee9c0aea23"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-win32.whl", hash = "sha256:535377e9b10aff5a045e3d9ada8a62d02058b422c0504ebdcf07930599890eb0"}, + {file = "SQLAlchemy-1.4.46-cp36-cp36m-win_amd64.whl", hash = "sha256:18cafdb27834fa03569d29f571df7115812a0e59fd6a3a03ccb0d33678ec8420"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:a1ad90c97029cc3ab4ffd57443a20fac21d2ec3c89532b084b073b3feb5abff3"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4847f4b1d822754e35707db913396a29d874ee77b9c3c3ef3f04d5a9a6209618"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a99282848b6cae0056b85da17392a26b2d39178394fc25700bcf967e06e97a"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4b1cc7835b39835c75cf7c20c926b42e97d074147c902a9ebb7cf2c840dc4e2"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-win32.whl", hash = 
"sha256:c522e496f9b9b70296a7675272ec21937ccfc15da664b74b9f58d98a641ce1b6"}, + {file = "SQLAlchemy-1.4.46-cp37-cp37m-win_amd64.whl", hash = "sha256:ae067ab639fa499f67ded52f5bc8e084f045d10b5ac7bb928ae4ca2b6c0429a5"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e3c1808008124850115a3f7e793a975cfa5c8a26ceeeb9ff9cbb4485cac556df"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d164df3d83d204c69f840da30b292ac7dc54285096c6171245b8d7807185aa"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b33ffbdbbf5446cf36cd4cc530c9d9905d3c2fe56ed09e25c22c850cdb9fac92"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d94682732d1a0def5672471ba42a29ff5e21bb0aae0afa00bb10796fc1e28dd"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-win32.whl", hash = "sha256:f8cb80fe8d14307e4124f6fad64dfd87ab749c9d275f82b8b4ec84c84ecebdbe"}, + {file = "SQLAlchemy-1.4.46-cp38-cp38-win_amd64.whl", hash = "sha256:07e48cbcdda6b8bc7a59d6728bd3f5f574ffe03f2c9fb384239f3789c2d95c2e"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1b1e5e96e2789d89f023d080bee432e2fef64d95857969e70d3cadec80bd26f0"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3714e5b33226131ac0da60d18995a102a17dddd42368b7bdd206737297823ad"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:955162ad1a931fe416eded6bb144ba891ccbf9b2e49dc7ded39274dd9c5affc5"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6e4cb5c63f705c9d546a054c60d326cbde7421421e2d2565ce3e2eee4e1a01f"}, + {file = 
"SQLAlchemy-1.4.46-cp39-cp39-win32.whl", hash = "sha256:51e1ba2884c6a2b8e19109dc08c71c49530006c1084156ecadfaadf5f9b8b053"}, + {file = "SQLAlchemy-1.4.46-cp39-cp39-win_amd64.whl", hash = "sha256:315676344e3558f1f80d02535f410e80ea4e8fddba31ec78fe390eff5fb8f466"}, + {file = "SQLAlchemy-1.4.46.tar.gz", hash = "sha256:6913b8247d8a292ef8315162a51931e2b40ce91681f1b6f18f697045200c4a30"}, +] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.0)", "greenlet (!=0.4.17)"] -mariadb_connector = ["mariadb (>=1.0.1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql_pymssql = ["pymssql"] -mssql_pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql_connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", 
"cx_oracle (>=7,<8)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql_pg8000 = ["pg8000 (>=1.16.6)"] -postgresql_psycopg2binary = ["psycopg2-binary"] -postgresql_psycopg2cffi = ["psycopg2cffi"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "starlette" -version = "0.17.1" +version = "0.26.1" description = "The little ASGI library that shines." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "starlette-0.26.1-py3-none-any.whl", hash = "sha256:e87fce5d7cbdde34b76f0ac69013fd9d190d581d80681493016666e6f96c6d5e"}, + {file = "starlette-0.26.1.tar.gz", hash = "sha256:41da799057ea8620e4667a3e69a5b1923ebd32b1819c8fa75634bbe8d8bea9bd"}, +] [package.dependencies] -anyio = ">=3.0.0,<4" +anyio = ">=3.4.0,<5" [package.extras] -full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] [[package]] name = "ulid-py" @@ -465,34 +721,29 @@ description 
= "Universally Unique Lexicographically Sortable Identifier" category = "main" optional = false python-versions = "*" - -[[package]] -name = "urllib3" -version = "1.26.12" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +files = [ + {file = "ulid-py-1.1.0.tar.gz", hash = "sha256:dc6884be91558df077c3011b9fb0c87d1097cb8fc6534b11f310161afd5738f0"}, + {file = "ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987"}, +] [[package]] name = "uvicorn" -version = "0.19.0" +version = "0.21.1" description = "The lightning-fast ASGI server." 
category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.21.1-py3-none-any.whl", hash = "sha256:e47cac98a6da10cd41e6fd036d472c6f58ede6c5dbee3dbee3ef7a100ed97742"}, + {file = "uvicorn-0.21.1.tar.gz", hash = "sha256:0fac9cb342ba099e0d582966005f3fdba5b0290579fed4a6266dc702ca7bb032"}, +] [package.dependencies] click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "webencodings" @@ -501,274 +752,12 @@ description = "Character encoding aliases for legacy web content" category = "main" optional = false python-versions = "*" - -[metadata] -lock-version = "1.1" -python-versions = "^3.10" -content-hash = "d61472062b64d83922624a383db8d855764147f0e646b323f204d831778311a5" - -[metadata.files] -aiosqlite = [ - {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"}, - {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"}, -] -anyio = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = 
"sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = 
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] -databases = [ - {file = "databases-0.6.1-py3-none-any.whl", hash = "sha256:47fae85d82d8227049f08b154019913c3ad2f6057ceb0b5ebb36703be6f5666b"}, - {file = "databases-0.6.1.tar.gz", hash = "sha256:0a69c6983a27e10a5b75ffa094486f1febadd9d5a8db016e69b8c2f6a354dc30"}, -] -greenlet = [ - {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, - {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, - {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, - {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, - {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, - {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, - {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, - {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, - {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, - {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, - {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, - {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, - {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, - {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, - {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, - {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = 
"sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, - {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, - {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, - {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, - {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, - {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, - {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, 
- {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, - {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, - {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, - {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, - {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, -] -h11 = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] -html5lib = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -nodeenv = [ - {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, - {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = 
"py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyright = [ - {file = "pyright-1.1.276-py3-none-any.whl", hash = "sha256:d9388405ea20a55446cb7809b1746158bdf557f9162b476f5aed71173f4ffd2b"}, - {file = "pyright-1.1.276.tar.gz", hash = "sha256:debaa08f6975dd381b9408880e36bb781ba7a1a6cf24b7868e83be41b6c8cb75"}, -] -pytest = [ - {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, - {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, - {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = 
"six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sniffio = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, -] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] -SQLAlchemy = [ - {file = "SQLAlchemy-1.4.25-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a36ea43919e51b0de0c0bc52bcfdad7683f6ea9fb81b340cdabb9df0e045e0f7"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:75cd5d48389a7635393ff5a9214b90695c06b3d74912109c3b00ce7392b69c6c"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27m-win32.whl", hash = "sha256:16ef07e102d2d4f974ba9b0d4ac46345a411ad20ad988b3654d59ff08e553b1c"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27m-win_amd64.whl", hash = "sha256:a79abdb404d9256afb8aeaa0d3a4bc7d3b6d8b66103d8b0f2f91febd3909976e"}, - {file = "SQLAlchemy-1.4.25-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7ad59e2e16578b6c1a2873e4888134112365605b08a6067dd91e899e026efa1c"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a505ecc0642f52e7c65afb02cc6181377d833b7df0994ecde15943b18d0fa89c"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a28fe28c359835f3be20c89efd517b35e8f97dbb2ca09c6cf0d9ac07f62d7ef6"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:41a916d815a3a23cb7fff8d11ad0c9b93369ac074e91e428075e088fe57d5358"}, - {file = 
"SQLAlchemy-1.4.25-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:842c49dd584aedd75c2ee05f6c950730c3ffcddd21c5824ed0f820808387e1e3"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-win32.whl", hash = "sha256:6b602e3351f59f3999e9fb8b87e5b95cb2faab6a6ecdb482382ac6fdfbee5266"}, - {file = "SQLAlchemy-1.4.25-cp36-cp36m-win_amd64.whl", hash = "sha256:6400b22e4e41cc27623a9a75630b7719579cd9a3a2027bcf16ad5aaa9a7806c0"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:dd4ed12a775f2cde4519f4267d3601990a97d8ecde5c944ab06bfd6e8e8ea177"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b7778a205f956755e05721eebf9f11a6ac18b2409bff5db53ce5fe7ede79831"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:08d9396a2a38e672133266b31ed39b2b1f2b5ec712b5bff5e08033970563316a"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e93978993a2ad0af43f132be3ea8805f56b2f2cd223403ec28d3e7d5c6d39ed1"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-win32.whl", hash = "sha256:0566a6e90951590c0307c75f9176597c88ef4be2724958ca1d28e8ae05ec8822"}, - {file = "SQLAlchemy-1.4.25-cp37-cp37m-win_amd64.whl", hash = "sha256:0b08a53e40b34205acfeb5328b832f44437956d673a6c09fce55c66ab0e54916"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:33a1e86abad782e90976de36150d910748b58e02cd7d35680d441f9a76806c18"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ed67aae8cde4d32aacbdba4f7f38183d14443b714498eada5e5a7a37769c0b7"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:1ebd69365717becaa1b618220a3df97f7c08aa68e759491de516d1c3667bba54"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0cd2d5c7ea96d3230cb20acac3d89de3b593339c1447b4d64bfcf4eac1110"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-win32.whl", hash = "sha256:c211e8ec81522ce87b0b39f0cf0712c998d4305a030459a0e115a2b3dc71598f"}, - {file = "SQLAlchemy-1.4.25-cp38-cp38-win_amd64.whl", hash = "sha256:9a1df8c93a0dd9cef0839917f0c6c49f46c75810cf8852be49884da4a7de3c59"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:1b38db2417b9f7005d6ceba7ce2a526bf10e3f6f635c0f163e6ed6a42b5b62b2"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e37621b37c73b034997b5116678862f38ee70e5a054821c7b19d0e55df270dec"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:91cd87d1de0111eaca11ccc3d31af441c753fa2bc22df72e5009cfb0a1af5b03"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90fe429285b171bcc252e21515703bdc2a4721008d1f13aa5b7150336f8a8493"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-win32.whl", hash = "sha256:6003771ea597346ab1e97f2f58405c6cacbf6a308af3d28a9201a643c0ac7bb3"}, - {file = "SQLAlchemy-1.4.25-cp39-cp39-win_amd64.whl", hash = "sha256:9ebe49c3960aa2219292ea2e5df6acdc425fc828f2f3d50b4cfae1692bcb5f02"}, - {file = "SQLAlchemy-1.4.25.tar.gz", hash = "sha256:1adf3d25e2e33afbcd48cfad8076f9378793be43e7fec3e4334306cac6bec138"}, -] -starlette = [ - {file = "starlette-0.17.1-py3-none-any.whl", hash = "sha256:26a18cbda5e6b651c964c12c88b36d9898481cd428ed6e063f5f29c418f73050"}, - {file = "starlette-0.17.1.tar.gz", hash = "sha256:57eab3cc975a28af62f6faec94d355a410634940f10b30d68d31cb5ec1b44ae8"}, -] -toml = [ - {file = 
"toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -ulid-py = [ - {file = "ulid-py-1.1.0.tar.gz", hash = "sha256:dc6884be91558df077c3011b9fb0c87d1097cb8fc6534b11f310161afd5738f0"}, - {file = "ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -uvicorn = [ - {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"}, - {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"}, -] -webencodings = [ +files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = 
"a43dcab0548fc3be276e10ff19fe108211e5bdc42a8a161c744eeb4d20b14294" diff --git a/pyproject.toml b/pyproject.toml index 60d94b3..08c45e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,26 +6,20 @@ authors = ["ducklet "] license = "LOL" [tool.poetry.dependencies] -python = "^3.10" -requests = "^2.25.1" +python = "^3.11" beautifulsoup4 = "^4.9.3" html5lib = "^1.1" -starlette = "^0.17.0" +starlette = "^0.26" ulid-py = "^1.1.0" -databases = {extras = ["sqlite"], version = "^0.6.1"} -toml = "^0.10.2" -uvicorn = "^0.19.0" - -[tool.poetry.group.fixes.dependencies] -# `databases` is having issues with new versions of SQLAlchemy 1.4, -# and `greenlet` is also always a pain. -SQLAlchemy = "1.4.25" -greenlet = "1.1.2" +databases = {extras = ["sqlite"], version = "^0.7.0"} +uvicorn = "^0.21" +httpx = "^0.23.3" [tool.poetry.group.dev] optional = true [tool.poetry.group.dev.dependencies] +autoflake = "*" pytest = "*" pyright = "*" black = "*" @@ -37,4 +31,14 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.pyright] -pythonVersion = "3.10" +pythonVersion = "3.11" + +[tool.isort] +profile = "black" + +[tool.autoflake] +remove-duplicate-keys = true +remove-unused-variables = true +remove-all-unused-imports = true +ignore-init-module-imports = true +ignore-pass-after-docstring = true diff --git a/scripts/lint-py b/scripts/lint-py index 84a4c65..54cb0f2 100755 --- a/scripts/lint-py +++ b/scripts/lint-py @@ -4,6 +4,7 @@ cd "$RUN_DIR" [ -z "${DEBUG:-}" ] || set -x -isort --profile black unwind -black unwind +autoflake --quiet --check --recursive unwind tests +isort unwind tests +black unwind tests pyright diff --git a/scripts/profile b/scripts/profile new file mode 100755 index 0000000..ccbfc70 --- /dev/null +++ b/scripts/profile @@ -0,0 +1,13 @@ +#!/bin/sh -eu + +cd "$RUN_DIR" + +outfile="profile-$(date '+%Y%m%d-%H%M%S').txt" + +[ -z "${DEBUG:-}" ] || set -x + +echo "# Writing profiler stats to: $outfile" +python -m cProfile -o "$outfile" -m 
unwind "$@" + +echo "# Loading stats file: $outfile" +python -m pstats "$outfile" diff --git a/scripts/server b/scripts/server index 5440717..599cb7f 100755 --- a/scripts/server +++ b/scripts/server @@ -1,7 +1,14 @@ #!/bin/sh -eu +: "${UNWIND_PORT:=8097}" + cd "$RUN_DIR" [ -z "${DEBUG:-}" ] || set -x -exec uvicorn --host 0.0.0.0 --factory unwind:create_app +export UNWIND_PORT + +exec uvicorn \ + --host 0.0.0.0 \ + --port "$UNWIND_PORT" \ + --factory unwind:create_app diff --git a/scripts/tests b/scripts/tests index 4237558..df8b5a0 100755 --- a/scripts/tests +++ b/scripts/tests @@ -10,5 +10,6 @@ trap 'rm "$dbfile"' EXIT TERM INT QUIT [ -z "${DEBUG:-}" ] || set -x +SQLALCHEMY_WARN_20=1 \ UNWIND_STORAGE="$dbfile" \ python -m pytest "$@" diff --git a/tests/conftest.py b/tests/conftest.py index 0fd79ea..e57d3e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,8 @@ import asyncio import pytest +import pytest_asyncio + from unwind import db @@ -13,7 +15,7 @@ def event_loop(): loop.close() -@pytest.fixture(scope="session") +@pytest_asyncio.fixture(scope="session") async def shared_conn(): c = db.shared_connection() await c.connect() @@ -24,7 +26,7 @@ async def shared_conn(): await c.disconnect() -@pytest.fixture +@pytest_asyncio.fixture async def conn(shared_conn): async with shared_conn.transaction(force_rollback=True): yield shared_conn diff --git a/tests/test_db.py b/tests/test_db.py index caeaf69..ac8e64b 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -1,14 +1,13 @@ from datetime import datetime + import pytest from unwind import db, models, web_models -pytestmark = pytest.mark.asyncio - -async def test_add_and_get(shared_conn): +@pytest.mark.asyncio +async def test_add_and_get(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): - m1 = models.Movie( title="test movie", release_year=2013, @@ -31,9 +30,9 @@ async def test_add_and_get(shared_conn): assert m2 == await db.get(models.Movie, id=str(m2.id)) -async def 
test_find_ratings(shared_conn): +@pytest.mark.asyncio +async def test_find_ratings(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): - m1 = models.Movie( title="test movie", release_year=2013, @@ -157,4 +156,3 @@ async def test_find_ratings(shared_conn): rows = await db.find_ratings(title="test", include_unrated=True) ratings = tuple(web_models.Rating(**r) for r in rows) assert (web_models.Rating.from_movie(m1),) == ratings - diff --git a/tests/test_imdb.py b/tests/test_imdb.py index 13a03fd..00467ce 100644 --- a/tests/test_imdb.py +++ b/tests/test_imdb.py @@ -1,14 +1,15 @@ import pytest + from unwind.imdb import imdb_rating_from_score, score_from_imdb_rating @pytest.mark.parametrize("rating", (x / 10 for x in range(10, 101))) -def test_rating_conversion(rating): +def test_rating_conversion(rating: float): assert rating == imdb_rating_from_score(score_from_imdb_rating(rating)) @pytest.mark.parametrize("score", range(0, 101)) -def test_score_conversion(score): +def test_score_conversion(score: int): # Because our score covers 101 discrete values and IMDb's rating only 91 # discrete values, the mapping is non-injective, i.e. 10 values can't be # mapped uniquely. 
diff --git a/tests/test_web.py b/tests/test_web.py index 55c2d23..358c2a2 100644 --- a/tests/test_web.py +++ b/tests/test_web.py @@ -1,18 +1,14 @@ -from starlette.testclient import TestClient import pytest +from starlette.testclient import TestClient -from unwind import create_app -from unwind import db, models, imdb - -# https://pypi.org/project/pytest-asyncio/ -pytestmark = pytest.mark.asyncio +from unwind import create_app, db, imdb, models app = create_app() -async def test_app(shared_conn): +@pytest.mark.asyncio +async def test_app(shared_conn: db.Database): async with shared_conn.transaction(force_rollback=True): - # https://www.starlette.io/testclient/ client = TestClient(app) response = client.get("/api/v1/movies") diff --git a/unwind/__main__.py b/unwind/__main__.py index 983ede7..e802831 100644 --- a/unwind/__main__.py +++ b/unwind/__main__.py @@ -6,7 +6,7 @@ from pathlib import Path from . import config from .db import close_connection_pool, open_connection_pool from .imdb import refresh_user_ratings_from_imdb -from .imdb_import import import_from_file +from .imdb_import import download_datasets, import_from_file log = logging.getLogger(__name__) @@ -15,7 +15,7 @@ async def run_load_user_ratings_from_imdb(): await open_connection_pool() i = 0 - async for rating in refresh_user_ratings_from_imdb(): + async for _ in refresh_user_ratings_from_imdb(): i += 1 log.info("✨ Imported %s new ratings.", i) @@ -31,6 +31,10 @@ async def run_import_imdb_dataset(basics_path: Path, ratings_path: Path): await close_connection_pool() +async def run_download_imdb_dataset(basics_path: Path, ratings_path: Path): + await download_datasets(basics_path=basics_path, ratings_path=ratings_path) + + def getargs(): parser = argparse.ArgumentParser() commands = parser.add_subparsers(required=True) @@ -55,6 +59,25 @@ def getargs(): "--ratings", metavar="ratings_file.tsv.gz", type=Path, required=True ) + parser_download_imdb_dataset = commands.add_parser( + "download-imdb-dataset", + 
help="Download IMDb datasets.", + description=""" + Download IMDb datasets. + """, + ) + parser_download_imdb_dataset.add_argument( + dest="mode", + action="store_const", + const="download-imdb-dataset", + ) + parser_download_imdb_dataset.add_argument( + "--basics", metavar="basics_file.tsv.gz", type=Path, required=True + ) + parser_download_imdb_dataset.add_argument( + "--ratings", metavar="ratings_file.tsv.gz", type=Path, required=True + ) + parser_load_user_ratings_from_imdb = commands.add_parser( "load-user-ratings-from-imdb", help="Load user ratings from imdb.com.", @@ -94,6 +117,8 @@ def main(): asyncio.run(run_load_user_ratings_from_imdb()) elif args.mode == "import-imdb-dataset": asyncio.run(run_import_imdb_dataset(args.basics, args.ratings)) + elif args.mode == "download-imdb-dataset": + asyncio.run(run_download_imdb_dataset(args.basics, args.ratings)) main() diff --git a/unwind/config.py b/unwind/config.py index 6382cf5..6cc255a 100644 --- a/unwind/config.py +++ b/unwind/config.py @@ -1,8 +1,7 @@ import os +import tomllib from pathlib import Path -import toml - datadir = Path(os.getenv("UNWIND_DATA") or "./data") cachedir = ( Path(cachedir) @@ -14,7 +13,8 @@ loglevel = os.getenv("UNWIND_LOGLEVEL") or ("DEBUG" if debug else "INFO") storage_path = os.getenv("UNWIND_STORAGE", datadir / "db.sqlite") config_path = os.getenv("UNWIND_CONFIG", datadir / "config.toml") -_config = toml.load(config_path) +with open(config_path, "rb") as fd: + _config = tomllib.load(fd) api_base = _config["api"].get("base", "/api/") api_cors = _config["api"].get("cors", "*") diff --git a/unwind/db.py b/unwind/db.py index 13217e9..c07b3a9 100644 --- a/unwind/db.py +++ b/unwind/db.py @@ -4,7 +4,7 @@ import logging import re import threading from pathlib import Path -from typing import Any, Iterable, Literal, Optional, Type, TypeVar, Union +from typing import Any, Iterable, Literal, Type, TypeVar import sqlalchemy from databases import Database @@ -26,7 +26,7 @@ from .types import ULID 
log = logging.getLogger(__name__) T = TypeVar("T") -_shared_connection: Optional[Database] = None +_shared_connection: Database | None = None async def open_connection_pool() -> None: @@ -119,7 +119,6 @@ async def apply_db_patches(db: Database): raise RuntimeError("No statement found.") async with db.transaction(): - for query in queries: await db.execute(query) @@ -131,12 +130,12 @@ async def apply_db_patches(db: Database): await db.execute("vacuum") -async def get_import_progress() -> Optional[Progress]: +async def get_import_progress() -> Progress | None: """Return the latest import progress.""" return await get(Progress, type="import-imdb-movies", order_by="started DESC") -async def stop_import_progress(*, error: BaseException = None): +async def stop_import_progress(*, error: BaseException | None = None): """Stop the current import. If an error is given, it will be logged to the progress state. @@ -176,6 +175,8 @@ async def set_import_progress(progress: float) -> Progress: else: await add(current) + return current + _lock = threading.Lock() _prelock = threading.Lock() @@ -243,8 +244,8 @@ ModelType = TypeVar("ModelType") async def get( - model: Type[ModelType], *, order_by: str = None, **kwds -) -> Optional[ModelType]: + model: Type[ModelType], *, order_by: str | None = None, **kwds +) -> ModelType | None: """Load a model instance from the database. Passing `kwds` allows to filter the instance to load. 
You have to encode the @@ -262,7 +263,7 @@ async def get( query += f" ORDER BY {order_by}" async with locked_connection() as conn: row = await conn.fetch_one(query=query, values=values) - return fromplain(model, row, serialized=True) if row else None + return fromplain(model, row._mapping, serialized=True) if row else None async def get_many(model: Type[ModelType], **kwds) -> Iterable[ModelType]: @@ -282,7 +283,7 @@ async def get_many(model: Type[ModelType], **kwds) -> Iterable[ModelType]: query = f"SELECT {fields_} FROM {model._table} WHERE {cond}" async with locked_connection() as conn: rows = await conn.fetch_all(query=query, values=values) - return (fromplain(model, row, serialized=True) for row in rows) + return (fromplain(model, row._mapping, serialized=True) for row in rows) async def get_all(model: Type[ModelType], **kwds) -> Iterable[ModelType]: @@ -293,7 +294,7 @@ async def get_all(model: Type[ModelType], **kwds) -> Iterable[ModelType]: query = f"SELECT {fields_} FROM {model._table} WHERE {cond}" async with locked_connection() as conn: rows = await conn.fetch_all(query=query, values=values) - return (fromplain(model, row, serialized=True) for row in rows) + return (fromplain(model, row._mapping, serialized=True) for row in rows) async def update(item): @@ -406,16 +407,16 @@ def sql_escape(s: str, char="#"): async def find_ratings( *, - title: str = None, - media_type: str = None, + title: str | None = None, + media_type: str | None = None, exact: bool = False, ignore_tv_episodes: bool = False, include_unrated: bool = False, - yearcomp: tuple[Literal["<", "=", ">"], int] = None, + yearcomp: tuple[Literal["<", "=", ">"], int] | None = None, limit_rows: int = 10, user_ids: Iterable[str] = [], ): - values: dict[str, Union[int, str]] = { + values: dict[str, int | str] = { "limit_rows": limit_rows, } @@ -466,7 +467,7 @@ async def find_ratings( """ async with locked_connection() as conn: rows = await conn.fetch_all(bindparams(query, values)) - movie_ids = 
tuple(r["movie_id"] for r in rows) + movie_ids = tuple(r._mapping["movie_id"] for r in rows) if include_unrated and len(movie_ids) < limit_rows: sqlin, sqlin_vals = sql_in("id", movie_ids, not_=True) @@ -485,7 +486,7 @@ async def find_ratings( {**values, **sqlin_vals, "limit_rows": limit_rows - len(movie_ids)}, ) ) - movie_ids += tuple(r["movie_id"] for r in rows) + movie_ids += tuple(r._mapping["movie_id"] for r in rows) return await ratings_for_movie_ids(ids=movie_ids) @@ -527,29 +528,13 @@ async def ratings_for_movie_ids( async with locked_connection() as conn: rows = await conn.fetch_all(bindparams(query, vals)) - return tuple(dict(r) for r in rows) + return tuple(dict(r._mapping) for r in rows) def sql_fields(tp: Type): return (f"{tp._table}.{f.name}" for f in fields(tp)) -def sql_fieldmap(tp: Type): - """-> {alias: (table, field_name)}""" - return {f"{tp._table}_{f.name}": (tp._table, f.name) for f in fields(tp)} - - -def mux(*tps: Type): - return ", ".join( - f"{t}.{n} AS {k}" for tp in tps for k, (t, n) in sql_fieldmap(tp).items() - ) - - -def demux(tp: Type[ModelType], row) -> ModelType: - d = {n: row[k] for k, (_, n) in sql_fieldmap(tp).items()} - return fromplain(tp, d, serialized=True) - - def sql_in(column: str, values: Iterable[T], not_=False) -> tuple[str, dict[str, T]]: c = column.replace(".", "___") value_map = {f"{c}_{i}": v for i, v in enumerate(values, start=1)} @@ -583,22 +568,22 @@ async def ratings_for_movies( async with locked_connection() as conn: rows = await conn.fetch_all(query, values) - return (fromplain(Rating, row, serialized=True) for row in rows) + return (fromplain(Rating, row._mapping, serialized=True) for row in rows) async def find_movies( *, - title: str = None, - media_type: str = None, + title: str | None = None, + media_type: str | None = None, exact: bool = False, ignore_tv_episodes: bool = False, - yearcomp: tuple[Literal["<", "=", ">"], int] = None, + yearcomp: tuple[Literal["<", "=", ">"], int] | None = None, 
limit_rows: int = 10, skip_rows: int = 0, include_unrated: bool = False, user_ids: list[ULID] = [], ) -> Iterable[tuple[Movie, list[Rating]]]: - values: dict[str, Union[int, str]] = { + values: dict[str, int | str] = { "limit_rows": limit_rows, "skip_rows": skip_rows, } @@ -650,7 +635,7 @@ async def find_movies( async with locked_connection() as conn: rows = await conn.fetch_all(bindparams(query, values)) - movies = [fromplain(Movie, row, serialized=True) for row in rows] + movies = [fromplain(Movie, row._mapping, serialized=True) for row in rows] if not user_ids: return ((m, []) for m in movies) diff --git a/unwind/imdb.py b/unwind/imdb.py index e541277..477ec64 100644 --- a/unwind/imdb.py +++ b/unwind/imdb.py @@ -2,12 +2,11 @@ import logging import re from collections import namedtuple from datetime import datetime -from typing import Optional, Tuple from urllib.parse import urljoin from . import db from .models import Movie, Rating, User -from .request import cache_path, session, soup_from_url +from .request import asession, asoup_from_url, cache_path log = logging.getLogger(__name__) @@ -35,13 +34,11 @@ log = logging.getLogger(__name__) # p.text-muted.text-small span[name=nv] [data-value] -async def refresh_user_ratings_from_imdb(stop_on_dupe=True): - - with session() as s: +async def refresh_user_ratings_from_imdb(stop_on_dupe: bool = True): + async with asession() as s: s.headers["Accept-Language"] = "en-US, en;q=0.5" for user in await db.get_all(User): - log.info("⚡️ Loading data for %s ...", user.name) try: @@ -98,7 +95,6 @@ find_movie_id = re.compile(r"/title/(?Ptt\d+)/").search def movie_and_rating_from_item(item) -> tuple[Movie, Rating]: - genres = (genre := item.find("span", "genre")) and genre.string or "" movie = Movie( title=item.h3.a.string.strip(), @@ -153,10 +149,10 @@ def movie_and_rating_from_item(item) -> tuple[Movie, Rating]: ForgedRequest = namedtuple("ForgedRequest", "url headers") -async def parse_page(url) -> Tuple[list[Rating], 
Optional[str]]: +async def parse_page(url: str) -> tuple[list[Rating], str | None]: ratings = [] - soup = soup_from_url(url) + soup = await asoup_from_url(url) meta = soup.find("meta", property="pageId") headline = soup.h1 @@ -170,7 +166,6 @@ async def parse_page(url) -> Tuple[list[Rating], Optional[str]]: items = soup.find_all("div", "lister-item-content") for i, item in enumerate(items): - try: movie, rating = movie_and_rating_from_item(item) except Exception as err: @@ -196,11 +191,10 @@ async def parse_page(url) -> Tuple[list[Rating], Optional[str]]: return (ratings, next_url if url != next_url else None) -async def load_ratings(user_id): +async def load_ratings(user_id: str): next_url = user_ratings_url(user_id) while next_url: - ratings, next_url = await parse_page(next_url) for i, rating in enumerate(ratings): diff --git a/unwind/imdb_import.py b/unwind/imdb_import.py index 45360aa..705db2f 100644 --- a/unwind/imdb_import.py +++ b/unwind/imdb_import.py @@ -1,10 +1,11 @@ +import asyncio import csv import gzip import logging from dataclasses import dataclass, fields from datetime import datetime, timezone from pathlib import Path -from typing import Generator, Literal, Optional, Type, TypeVar, overload +from typing import Generator, Literal, Type, TypeVar, overload from . 
import config, db, request from .db import add_or_update_many_movies @@ -27,10 +28,10 @@ class BasicRow: primaryTitle: str originalTitle: str isAdult: bool - startYear: Optional[int] - endYear: Optional[int] - runtimeMinutes: Optional[int] - genres: Optional[set[str]] + startYear: int | None + endYear: int | None + runtimeMinutes: int | None + genres: set[str] | None @classmethod def from_row(cls, row): @@ -100,7 +101,7 @@ title_types = { } -def gz_mtime(path) -> datetime: +def gz_mtime(path: Path) -> datetime: """Return the timestamp of the compressed file.""" g = gzip.GzipFile(path, "rb") g.peek(1) # start reading the file to fill the timestamp field @@ -108,14 +109,13 @@ def gz_mtime(path) -> datetime: return datetime.fromtimestamp(g.mtime).replace(tzinfo=timezone.utc) -def count_lines(path) -> int: +def count_lines(path: Path) -> int: i = 0 - one_mb = 2 ** 20 + one_mb = 2**20 buf_size = 8 * one_mb # 8 MiB seems to give a good read/process performance. with gzip.open(path, "rt") as f: - while buf := f.read(buf_size): i += buf.count("\n") @@ -124,19 +124,19 @@ def count_lines(path) -> int: @overload def read_imdb_tsv( - path, row_type, *, unpack: Literal[False] + path: Path, row_type, *, unpack: Literal[False] ) -> Generator[list[str], None, None]: ... @overload def read_imdb_tsv( - path, row_type: Type[T], *, unpack: Literal[True] = True + path: Path, row_type: Type[T], *, unpack: Literal[True] = True ) -> Generator[T, None, None]: ... 
-def read_imdb_tsv(path, row_type, *, unpack=True): +def read_imdb_tsv(path: Path, row_type, *, unpack=True): with gzip.open(path, "rt", newline="") as f: rows = csv.reader(f, delimiter="\t", quoting=csv.QUOTE_NONE) @@ -161,7 +161,7 @@ def read_imdb_tsv(path, row_type, *, unpack=True): raise -def read_ratings(path): +def read_ratings(path: Path): mtime = gz_mtime(path) rows = read_imdb_tsv(path, RatingRow) @@ -171,19 +171,20 @@ def read_ratings(path): yield m -def read_ratings_as_mapping(path): +def read_ratings_as_mapping(path: Path): """Optimized function to quickly load all ratings.""" rows = read_imdb_tsv(path, RatingRow, unpack=False) return {r[0]: (round(100 * (float(r[1]) - 1) / 9), int(r[2])) for r in rows} -def read_basics(path): +def read_basics(path: Path) -> Generator[Movie | None, None, None]: mtime = gz_mtime(path) rows = read_imdb_tsv(path, BasicRow) for row in rows: if row.startYear is None: log.debug("Skipping movie, missing year: %s", row) + yield None continue m = row.as_movie() @@ -197,20 +198,24 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): log.info("💾 Importing movies ...") total = count_lines(basics_path) - assert total != 0 + log.debug("Found %i movies.", total) + if total == 0: + raise RuntimeError("No movies found.") perc_next_report = 0.0 perc_step = 0.1 chunk = [] for i, m in enumerate(read_basics(basics_path)): - perc = 100 * i / total if perc >= perc_next_report: await db.set_import_progress(perc) log.info("⏳ Imported %s%%", round(perc, 1)) perc_next_report += perc_step + if m is None: + continue + if m.media_type not in { "Movie", "Short", @@ -235,10 +240,27 @@ async def import_from_file(*, basics_path: Path, ratings_path: Path): await add_or_update_many_movies(chunk) chunk = [] + log.info("👍 Imported 100%") await db.set_import_progress(100) -async def load_from_web(*, force: bool = False): +async def download_datasets(*, basics_path: Path, ratings_path: Path) -> None: + """Download IMDb movie database
dumps. + + See https://www.imdb.com/interfaces/ and https://datasets.imdbws.com/ for + more information on the IMDb database dumps. + """ + basics_url = "https://datasets.imdbws.com/title.basics.tsv.gz" + ratings_url = "https://datasets.imdbws.com/title.ratings.tsv.gz" + + async with request.asession(): + await asyncio.gather( + request.adownload(ratings_url, to_path=ratings_path, only_if_newer=True), + request.adownload(basics_url, to_path=basics_path, only_if_newer=True), + ) + + +async def load_from_web(*, force: bool = False) -> None: """Refresh the full IMDb movie database. The latest dumps are first downloaded and then imported into the database. @@ -251,17 +273,13 @@ async def load_from_web(*, force: bool = False): await db.set_import_progress(0) try: - basics_url = "https://datasets.imdbws.com/title.basics.tsv.gz" - ratings_url = "https://datasets.imdbws.com/title.ratings.tsv.gz" ratings_file = config.datadir / "imdb/title.ratings.tsv.gz" basics_file = config.datadir / "imdb/title.basics.tsv.gz" ratings_mtime = ratings_file.stat().st_mtime if ratings_file.exists() else None bastics_mtime = basics_file.stat().st_mtime if basics_file.exists() else None - with request.session(): - request.download(ratings_url, ratings_file, only_if_newer=True) - request.download(basics_url, basics_file, only_if_newer=True) + await download_datasets(basics_path=basics_file, ratings_path=ratings_file) is_changed = ( ratings_mtime != ratings_file.stat().st_mtime diff --git a/unwind/models.py b/unwind/models.py index 37cd48d..4480307 100644 --- a/unwind/models.py +++ b/unwind/models.py @@ -3,13 +3,14 @@ from dataclasses import dataclass, field from dataclasses import fields as _fields from datetime import datetime, timezone from functools import partial +from types import UnionType from typing import ( Annotated, Any, ClassVar, Container, Literal, - Optional, + Mapping, Type, TypeVar, Union, @@ -19,13 +20,13 @@ from typing import ( from .types import ULID -JSON = Union[int, float, 
str, None, list["JSON"], dict[str, "JSON"]] +JSON = int | float | str | None | list["JSON"] | dict[str, "JSON"] JSONObject = dict[str, JSON] T = TypeVar("T") -def annotations(tp: Type) -> Optional[tuple]: +def annotations(tp: Type) -> tuple | None: return tp.__metadata__ if hasattr(tp, "__metadata__") else None @@ -42,7 +43,6 @@ def fields(class_or_instance): # XXX this might be a little slow (not sure), if so, memoize for f in _fields(class_or_instance): - if f.name == "_is_lazy": continue @@ -54,21 +54,21 @@ def fields(class_or_instance): def is_optional(tp: Type) -> bool: """Return wether the given type is optional.""" - if get_origin(tp) is not Union: + if not isinstance(tp, UnionType) and get_origin(tp) is not Union: return False args = get_args(tp) return len(args) == 2 and type(None) in args -def optional_type(tp: Type) -> Optional[Type]: +def optional_type(tp: Type) -> Type | None: """Return the wrapped type from an optional type. For example this will return `int` for `Optional[int]`. Since they're equivalent this also works for other optioning notations, like `Union[int, None]` and `int | None`. """ - if get_origin(tp) is not Union: + if not isinstance(tp, UnionType) and get_origin(tp) is not Union: return None args = get_args(tp) @@ -92,7 +92,7 @@ def _id(x: T) -> T: def asplain( - o: object, *, filter_fields: Container[str] = None, serialize: bool = False + o: object, *, filter_fields: Container[str] | None = None, serialize: bool = False ) -> dict[str, Any]: """Return the given model instance as `dict` with JSON compatible plain datatypes. @@ -109,7 +109,6 @@ def asplain( d: JSONObject = {} for f in fields(o): - if filter_fields is not None and f.name not in filter_fields: continue @@ -146,7 +145,7 @@ def asplain( return d -def fromplain(cls: Type[T], d: dict[str, Any], *, serialized: bool = False) -> T: +def fromplain(cls: Type[T], d: Mapping, *, serialized: bool = False) -> T: """Return an instance of the given model using the given data. 
If `serialized` is `True`, collection types (lists, dicts, etc.) will be @@ -157,7 +156,6 @@ def fromplain(cls: Type[T], d: dict[str, Any], *, serialized: bool = False) -> T dd: JSONObject = {} for f in fields(cls): - target = f.type otype = optional_type(f.type) is_opt = otype is not None @@ -188,7 +186,8 @@ def validate(o: object) -> None: vtype = type(getattr(o, f.name)) if vtype is not f.type: if get_origin(f.type) is vtype or ( - get_origin(f.type) is Union and vtype in get_args(f.type) + (isinstance(f.type, UnionType) or get_origin(f.type) is Union) + and vtype in get_args(f.type) ): continue raise ValueError(f"Invalid value type: {f.name}: {vtype}") @@ -206,7 +205,7 @@ class Progress: type: str = None state: str = None started: datetime = field(default_factory=utcnow) - stopped: Optional[str] = None + stopped: str | None = None @property def _state(self) -> dict: @@ -243,15 +242,15 @@ class Movie: id: ULID = field(default_factory=ULID) title: str = None # canonical title (usually English) - original_title: Optional[ - str - ] = None # original title (usually transscribed to latin script) + original_title: str | None = ( + None # original title (usually transscribed to latin script) + ) release_year: int = None # canonical release date media_type: str = None imdb_id: str = None - imdb_score: Optional[int] = None # range: [0,100] - imdb_votes: Optional[int] = None - runtime: Optional[int] = None # minutes + imdb_score: int | None = None # range: [0,100] + imdb_votes: int | None = None + runtime: int | None = None # minutes genres: set[str] = None created: datetime = field(default_factory=utcnow) updated: datetime = field(default_factory=utcnow) @@ -292,7 +291,7 @@ dataclass containing the ID of the linked data. The contents of the Relation are ignored or discarded when using `asplain`, `fromplain`, and `validate`. 
""" -Relation = Annotated[Optional[T], _RelationSentinel] +Relation = Annotated[T | None, _RelationSentinel] @dataclass @@ -309,8 +308,8 @@ class Rating: score: int = None # range: [0,100] rating_date: datetime = None - favorite: Optional[bool] = None - finished: Optional[bool] = None + favorite: bool | None = None + finished: bool | None = None def __eq__(self, other): """Return wether two Ratings are equal. @@ -342,11 +341,11 @@ class User: secret: str = None groups: list[dict[str, str]] = field(default_factory=list) - def has_access(self, group_id: Union[ULID, str], access: Access = "r"): + def has_access(self, group_id: ULID | str, access: Access = "r"): group_id = group_id if isinstance(group_id, str) else str(group_id) return any(g["id"] == group_id and access == g["access"] for g in self.groups) - def set_access(self, group_id: Union[ULID, str], access: Access): + def set_access(self, group_id: ULID | str, access: Access): group_id = group_id if isinstance(group_id, str) else str(group_id) for g in self.groups: if g["id"] == group_id: diff --git a/unwind/request.py b/unwind/request.py index 81f9f29..4e57564 100644 --- a/unwind/request.py +++ b/unwind/request.py @@ -4,19 +4,17 @@ import logging import os import tempfile from collections import deque -from contextlib import contextmanager -from dataclasses import dataclass +from contextlib import asynccontextmanager +from dataclasses import dataclass, field from functools import wraps from hashlib import md5 from pathlib import Path from random import random from time import sleep, time -from typing import Callable, Optional, Union +from typing import Callable, ParamSpec, TypeVar, cast import bs4 -import requests -from requests.status_codes import codes -from urllib3.util.retry import Retry +import httpx from . 
import config @@ -26,28 +24,17 @@ if config.debug and config.cachedir: config.cachedir.mkdir(exist_ok=True) -def set_retries(s: requests.Session, n: int, backoff_factor: float = 0.2): - retry = ( - Retry( - total=n, - connect=n, - read=n, - status=n, - status_forcelist=Retry.RETRY_AFTER_STATUS_CODES, - backoff_factor=backoff_factor, - ) - if n - else Retry(0, read=False) - ) - for a in s.adapters.values(): - a.max_retries = retry +_shared_asession = None + +_ASession_T = httpx.AsyncClient +_Response_T = httpx.Response + +_T = TypeVar("_T") +_P = ParamSpec("_P") -_shared_session = None - - -@contextmanager -def session(): +@asynccontextmanager +async def asession(): """Return the shared request session. The session is shared by all request functions and provides cookie @@ -55,38 +42,34 @@ def session(): Opening the session before making a request allows you to set headers or change the retry behavior. """ - global _shared_session + global _shared_asession - if _shared_session: - yield _shared_session + if _shared_asession: + yield _shared_asession return - _shared_session = Session() + _shared_asession = _ASession_T() + _shared_asession.headers[ + "user-agent" + ] = "Mozilla/5.0 Gecko/20100101 unwind/20230203" try: - yield _shared_session + async with _shared_asession: + yield _shared_asession finally: - _shared_session = None + _shared_asession = None -def Session() -> requests.Session: - s = requests.Session() - s.headers["User-Agent"] = "Mozilla/5.0 Gecko/20100101 unwind/20210506" - return s - - -def throttle( - times: int, per_seconds: float, jitter: Callable[[], float] = None -) -> Callable[[Callable], Callable]: - - calls: Deque[float] = deque(maxlen=times) +def _throttle( + times: int, per_seconds: float, jitter: Callable[[], float] | None = None +) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]: + calls: deque[float] = deque(maxlen=times) if jitter is None: jitter = lambda: 0.0 - def decorator(func: Callable) -> Callable: + def decorator(func: 
Callable[_P, _T]) -> Callable[_P, _T]: @wraps(func) - def inner(*args, **kwds): - + def inner(*args: _P.args, **kwds: _P.kwargs): # clean up while calls: if calls[0] + per_seconds > time(): @@ -118,23 +101,19 @@ def throttle( return decorator -class CachedStr(str): - is_cached = True - - @dataclass -class CachedResponse: +class _CachedResponse: is_cached = True status_code: int text: str url: str - headers: dict[str, str] = None + headers: dict[str, str] = field(default_factory=dict) def json(self): return json.loads(self.text) -class RedirectError(RuntimeError): +class _RedirectError(RuntimeError): def __init__(self, from_url: str, to_url: str, is_cached=False): self.from_url = from_url self.to_url = to_url @@ -142,44 +121,51 @@ class RedirectError(RuntimeError): super().__init__(f"Redirected: {from_url} -> {to_url}") -def cache_path(req) -> Optional[Path]: +def cache_path(req) -> Path | None: if not config.cachedir: return sig = repr(req.url) # + repr(sorted(req.headers.items())) return config.cachedir / md5(sig.encode()).hexdigest() -@throttle(1, 1, random) -def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: - - req = s.prepare_request(requests.Request("GET", url, *args, **kwds)) +@_throttle(1, 1, random) +async def _ahttp_get(s: _ASession_T, url: str, *args, **kwds) -> _Response_T: + req = s.build_request(method="GET", url=url, *args, **kwds) cachefile = cache_path(req) if config.debug else None if cachefile: if cachefile.exists(): log.debug( - f"💾 loading {req.url} ({req.headers!a}) from cache {cachefile} ..." 
+ "💾 loading %s (%a) from cache %s ...", req.url, req.headers, cachefile ) with cachefile.open() as fp: - resp = CachedResponse(**json.load(fp)) + resp = _CachedResponse(**json.load(fp)) if 300 <= resp.status_code <= 399: - raise RedirectError( + raise _RedirectError( from_url=resp.url, to_url=resp.headers["location"], is_cached=True ) - return resp + return cast(_Response_T, resp) - log.debug(f"⚡️ loading {req.url} ({req.headers!a}) ...") - resp = s.send(req, allow_redirects=False, stream=True) + log.debug("⚡️ loading %s (%a) ...", req.url, req.headers) + resp = await s.send(req, follow_redirects=False, stream=True) resp.raise_for_status() + await resp.aread() # Download the response stream to allow `resp.text` access. + if cachefile: + log.debug( + "💾 writing response to cache: %s (%a) -> %s", + req.url, + req.headers, + cachefile, + ) with cachefile.open("w") as fp: json.dump( { "status_code": resp.status_code, "text": resp.text, - "url": resp.url, + "url": str(resp.url), "headers": dict(resp.headers), }, fp, @@ -187,45 +173,46 @@ def http_get(s: requests.Session, url: str, *args, **kwds) -> requests.Response: if resp.is_redirect: # Redirects could mean trouble, we need to stay on top of that! 
- raise RedirectError(from_url=resp.url, to_url=resp.headers["location"]) + raise _RedirectError(from_url=str(resp.url), to_url=resp.headers["location"]) return resp -def soup_from_url(url): +async def asoup_from_url(url): """Return a BeautifulSoup instance from the contents for the given URL.""" - with session() as s: - r = http_get(s, url) + async with asession() as s: + r = await _ahttp_get(s, url) soup = bs4.BeautifulSoup(r.text, "html5lib") return soup -def last_modified_from_response(resp): - if last_mod := resp.headers.get("Last-Modified"): +def _last_modified_from_response(resp: _Response_T) -> float | None: + if last_mod := resp.headers.get("last-modified"): try: return email.utils.parsedate_to_datetime(last_mod).timestamp() except: log.exception("🐛 Received invalid value for Last-Modified: %s", last_mod) -def last_modified_from_file(path: Path): +def _last_modified_from_file(path: Path) -> float: return path.stat().st_mtime -def download( +async def adownload( url: str, - file_path: Union[Path, str] = None, *, - replace_existing: bool = None, + to_path: Path | str | None = None, + replace_existing: bool | None = None, only_if_newer: bool = False, - timeout: float = None, - verify_ssl: bool = True, + timeout: float | None = None, chunk_callback=None, response_callback=None, -): +) -> bytes | None: """Download a file. + If `to_path` is `None` return the remote content, otherwise write the + content to the given file path. Existing files will not be overwritten unless `replace_existing` is set. Setting `only_if_newer` will check if the remote file is newer than the local file, otherwise the download will be aborted. 
@@ -234,89 +221,103 @@ def download( replace_existing = only_if_newer file_exists = None - if file_path is not None: - file_path = Path(file_path) + if to_path is not None: + to_path = Path(to_path) - file_exists = file_path.exists() and file_path.stat().st_size + file_exists = to_path.exists() and to_path.stat().st_size if file_exists and not replace_existing: - raise FileExistsError(23, "Would replace existing file", str(file_path)) - - with session() as s: + raise FileExistsError(23, "Would replace existing file", str(to_path)) + async with asession() as s: headers = {} if file_exists and only_if_newer: - assert file_path - file_lastmod = last_modified_from_file(file_path) - headers["If-Modified-Since"] = email.utils.formatdate( + assert to_path + file_lastmod = _last_modified_from_file(to_path) + headers["if-modified-since"] = email.utils.formatdate( file_lastmod, usegmt=True ) - req = s.prepare_request(requests.Request("GET", url, headers=headers)) + req = s.build_request(method="GET", url=url, headers=headers, timeout=timeout) - log.debug("⚡️ loading %s (%s) ...", req.url, req.headers) - resp = s.send( - req, allow_redirects=True, stream=True, timeout=timeout, verify=verify_ssl - ) + log.debug("⚡️ Loading %s (%a) ...", req.url, dict(req.headers)) + resp = await s.send(req, follow_redirects=True, stream=True) - if response_callback is not None: - try: - response_callback(resp) - except: - log.exception("🐛 Error in response callback.") + try: + if response_callback is not None: + try: + response_callback(resp) + except: + log.exception("🐛 Error in response callback.") - log.debug("☕️ Response status: %s; headers: %s", resp.status_code, resp.headers) + log.debug( + "☕️ %s -> status: %s; headers: %a", + req.url, + resp.status_code, + dict(resp.headers), + ) - resp.raise_for_status() - - if resp.status_code == codes.not_modified: - log.debug("✋ Remote file has not changed, skipping download.") - return - - if file_path is None: - return resp.content - - assert 
replace_existing is True - - resp_lastmod = last_modified_from_response(resp) - - # Check Last-Modified in case the server ignored If-Modified-Since. - # XXX also check Content-Length? - if file_exists and only_if_newer and resp_lastmod is not None: - assert file_lastmod - - if resp_lastmod <= file_lastmod: - log.debug("✋ Local file is newer, skipping download.") - resp.close() + if resp.status_code == httpx.codes.NOT_MODIFIED: + log.debug( + "✋ Remote file has not changed, skipping download: %s -> %a", + req.url, + to_path, + ) return - # Create intermediate directories if necessary. - download_dir = file_path.parent - download_dir.mkdir(parents=True, exist_ok=True) + resp.raise_for_status() + + if to_path is None: + await resp.aread() # Download the response stream to allow `resp.content` access. + return resp.content + + resp_lastmod = _last_modified_from_response(resp) + + # Check Last-Modified in case the server ignored If-Modified-Since. + # XXX also check Content-Length? + if file_exists and only_if_newer and resp_lastmod is not None: + assert file_lastmod + + if resp_lastmod <= file_lastmod: + log.debug("✋ Local file is newer, skipping download: %a", req.url) + return + + # Create intermediate directories if necessary. + download_dir = to_path.parent + download_dir.mkdir(parents=True, exist_ok=True) + + # Write content to temp file. + tempdir = download_dir + tempfd, tempfile_path = tempfile.mkstemp( + dir=tempdir, prefix=f".download-{to_path.name}." + ) + one_mb = 2**20 + chunk_size = 8 * one_mb + try: + log.debug("💾 Writing to temp file %s ...", tempfile_path) + async for chunk in resp.aiter_bytes(chunk_size): + os.write(tempfd, chunk) + if chunk_callback: + try: + chunk_callback(chunk) + except: + log.exception("🐛 Error in chunk callback.") + finally: + os.close(tempfd) + + # Move downloaded file to destination. 
+ if to_path.exists(): + log.debug("💾 Replacing existing file: %s", to_path) + else: + log.debug("💾 Move to destination: %s", to_path) + if replace_existing: + Path(tempfile_path).replace(to_path) + else: + Path(tempfile_path).rename(to_path) + + # Fix file attributes. + if resp_lastmod is not None: + log.debug("💾 Adjusting file timestamp: %s (%s)", to_path, resp_lastmod) + os.utime(to_path, (resp_lastmod, resp_lastmod)) - # Write content to temp file. - tempdir = download_dir - tempfd, tempfile_path = tempfile.mkstemp( - dir=tempdir, prefix=f".download-{file_path.name}." - ) - one_mb = 2 ** 20 - chunk_size = 8 * one_mb - try: - log.debug("💾 Writing to temp file %s ...", tempfile_path) - for chunk in resp.iter_content(chunk_size=chunk_size, decode_unicode=False): - os.write(tempfd, chunk) - if chunk_callback: - try: - chunk_callback(chunk) - except: - log.exception("🐛 Error in chunk callback.") finally: - os.close(tempfd) - - # Move downloaded file to destination. - if file_exists: - log.debug("💾 Replacing existing file: %s", file_path) - Path(tempfile_path).replace(file_path) - - # Fix file attributes. 
- if resp_lastmod is not None: - os.utime(file_path, (resp_lastmod, resp_lastmod)) + await resp.aclose() diff --git a/unwind/types.py b/unwind/types.py index a54e0ec..94c0e00 100644 --- a/unwind/types.py +++ b/unwind/types.py @@ -1,5 +1,5 @@ import re -from typing import Union, cast +from typing import cast import ulid from ulid.hints import Buffer @@ -16,7 +16,7 @@ class ULID(ulid.ULID): _pattern = re.compile(r"^[0-9A-HJKMNP-TV-Z]{26}$") - def __init__(self, buffer: Union[Buffer, ulid.ULID, str, None] = None): + def __init__(self, buffer: Buffer | ulid.ULID | str | None = None): if isinstance(buffer, str): if not self._pattern.search(buffer): raise ValueError("Invalid ULID.") diff --git a/unwind/utils.py b/unwind/utils.py index 012d1fb..f253bde 100644 --- a/unwind/utils.py +++ b/unwind/utils.py @@ -17,7 +17,10 @@ def b64padded(s: str) -> str: def phc_scrypt( - secret: bytes, *, salt: bytes = None, params: dict[Literal["n", "r", "p"], int] = {} + secret: bytes, + *, + salt: bytes | None = None, + params: dict[Literal["n", "r", "p"], int] = {}, ) -> str: """Return the scrypt expanded secret in PHC string format. 
@@ -30,7 +33,7 @@ def phc_scrypt( if salt is None: salt = secrets.token_bytes(16) - n = params.get("n", 2 ** 14) # CPU/Memory cost factor + n = params.get("n", 2**14) # CPU/Memory cost factor r = params.get("r", 8) # block size p = params.get("p", 1) # parallelization factor # maxmem = 2 * 128 * n * r * p diff --git a/unwind/web.py b/unwind/web.py index e194c10..eb08e9c 100644 --- a/unwind/web.py +++ b/unwind/web.py @@ -1,8 +1,9 @@ import asyncio +import contextlib import logging import secrets from json.decoder import JSONDecodeError -from typing import Literal, Optional, overload +from typing import Literal, overload from starlette.applications import Starlette from starlette.authentication import ( @@ -85,11 +86,14 @@ def truthy(s: str): return bool(s) and s.lower() in {"1", "yes", "true"} -def yearcomp(s: str): +_Yearcomp = Literal["<", "=", ">"] + + +def yearcomp(s: str) -> tuple[_Yearcomp, int] | None: if not s: return - comp: Literal["<", "=", ">"] = "=" + comp: _Yearcomp = "=" if (prefix := s[0]) in "<=>": comp = prefix # type: ignore s = s[len(prefix) :] @@ -97,7 +101,9 @@ def yearcomp(s: str): return comp, int(s) -def as_int(x, *, max: int = None, min: Optional[int] = 1, default: int = None): +def as_int( + x, *, max: int | None = None, min: int | None = 1, default: int | None = None +) -> int: try: if not isinstance(x, int): x = int(x) @@ -135,7 +141,7 @@ async def json_from_body(request, keys: list[str]) -> list: ... 
-async def json_from_body(request, keys: list[str] = None): +async def json_from_body(request, keys: list[str] | None = None): if not await request.body(): data = {} @@ -158,7 +164,7 @@ def is_admin(request): return "admin" in request.auth.scopes -async def auth_user(request) -> Optional[User]: +async def auth_user(request) -> User | None: if not isinstance(request.user, AuthedUser): return @@ -176,7 +182,7 @@ async def auth_user(request) -> Optional[User]: _routes = [] -def route(path: str, *, methods: list[str] = None, **kwds): +def route(path: str, *, methods: list[str] | None = None, **kwds): def decorator(func): r = Route(path, func, methods=methods, **kwds) _routes.append(r) @@ -190,7 +196,6 @@ route.registered = _routes @route("/groups/{group_id}/ratings") async def get_ratings_for_group(request): - group_id = as_ulid(request.path_params["group_id"]) group = await db.get(Group, id=str(group_id)) @@ -251,7 +256,6 @@ def not_implemented(): @route("/movies") @requires(["authenticated"]) async def list_movies(request): - params = request.query_params user = await auth_user(request) @@ -319,7 +323,6 @@ async def list_movies(request): @route("/movies", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_movie(request): - not_implemented() @@ -361,7 +364,6 @@ _import_lock = asyncio.Lock() @route("/movies/_reload_imdb", methods=["POST"]) @requires(["authenticated", "admin"]) async def load_imdb_movies(request): - params = request.query_params force = truthy(params.get("force")) @@ -384,7 +386,6 @@ async def load_imdb_movies(request): @route("/users") @requires(["authenticated", "admin"]) async def list_users(request): - users = await db.get_all(User) return JSONResponse([asplain(u) for u in users]) @@ -393,7 +394,6 @@ async def list_users(request): @route("/users", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_user(request): - name, imdb_id = await json_from_body(request, ["name", "imdb_id"]) # XXX restrict name @@ -415,7 
+415,6 @@ async def add_user(request): @route("/users/{user_id}") @requires(["authenticated"]) async def show_user(request): - user_id = as_ulid(request.path_params["user_id"]) if is_admin(request): @@ -444,7 +443,6 @@ async def show_user(request): @route("/users/{user_id}", methods=["DELETE"]) @requires(["authenticated", "admin"]) async def remove_user(request): - user_id = as_ulid(request.path_params["user_id"]) user = await db.get(User, id=str(user_id)) @@ -462,7 +460,6 @@ async def remove_user(request): @route("/users/{user_id}", methods=["PATCH"]) @requires(["authenticated"]) async def modify_user(request): - user_id = as_ulid(request.path_params["user_id"]) if is_admin(request): @@ -510,7 +507,6 @@ async def modify_user(request): @route("/users/{user_id}/groups", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_group_to_user(request): - user_id = as_ulid(request.path_params["user_id"]) user = await db.get(User, id=str(user_id)) @@ -535,21 +531,18 @@ async def add_group_to_user(request): @route("/users/{user_id}/ratings") @requires(["private"]) async def ratings_for_user(request): - not_implemented() @route("/users/{user_id}/ratings", methods=["PUT"]) @requires("authenticated") async def set_rating_for_user(request): - not_implemented() @route("/users/_reload_ratings", methods=["POST"]) @requires(["authenticated", "admin"]) async def load_imdb_user_ratings(request): - ratings = [rating async for rating in imdb.refresh_user_ratings_from_imdb()] return JSONResponse({"new_ratings": [asplain(r) for r in ratings]}) @@ -558,7 +551,6 @@ async def load_imdb_user_ratings(request): @route("/groups") @requires(["authenticated", "admin"]) async def list_groups(request): - groups = await db.get_all(Group) return JSONResponse([asplain(g) for g in groups]) @@ -567,7 +559,6 @@ async def list_groups(request): @route("/groups", methods=["POST"]) @requires(["authenticated", "admin"]) async def add_group(request): - (name,) = await json_from_body(request, 
["name"]) # XXX restrict name @@ -581,7 +572,6 @@ async def add_group(request): @route("/groups/{group_id}/users", methods=["POST"]) @requires(["authenticated"]) async def add_user_to_group(request): - group_id = as_ulid(request.path_params["group_id"]) group = await db.get(Group, id=str(group_id)) @@ -623,6 +613,13 @@ def auth_error(request, err): return unauthorized(str(err)) +@contextlib.asynccontextmanager +async def lifespan(app: Starlette): + await open_connection_pool() + yield + await close_connection_pool() + + def create_app(): if config.loglevel == "DEBUG": logging.basicConfig( @@ -633,8 +630,7 @@ def create_app(): log.debug(f"Log level: {config.loglevel}") return Starlette( - on_startup=[open_connection_pool], - on_shutdown=[close_connection_pool], + lifespan=lifespan, routes=[ Mount(f"{config.api_base}v1", routes=route.registered), ], diff --git a/unwind/web_models.py b/unwind/web_models.py index 06bcb8c..6e83e1d 100644 --- a/unwind/web_models.py +++ b/unwind/web_models.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import Container, Iterable, Optional +from typing import Container, Iterable from . 
import imdb, models @@ -10,17 +10,17 @@ Score100 = int # [0, 100] @dataclass class Rating: canonical_title: str - imdb_score: Optional[Score100] - imdb_votes: Optional[int] + imdb_score: Score100 | None + imdb_votes: int | None media_type: str movie_imdb_id: str - original_title: Optional[str] + original_title: str | None release_year: int - user_id: Optional[str] - user_score: Optional[Score100] + user_id: str | None + user_score: Score100 | None @classmethod - def from_movie(cls, movie: models.Movie, *, rating: models.Rating = None): + def from_movie(cls, movie: models.Movie, *, rating: models.Rating | None = None): return cls( canonical_title=movie.title, imdb_score=movie.imdb_score, @@ -37,11 +37,11 @@ class Rating: @dataclass class RatingAggregate: canonical_title: str - imdb_score: Optional[Score100] - imdb_votes: Optional[int] + imdb_score: Score100 | None + imdb_votes: int | None link: URL media_type: str - original_title: Optional[str] + original_title: str | None user_scores: list[Score100] year: int