Compare commits

..

24 commits

Author SHA1 Message Date
2ce8924d1b chore: upgrade to Python 3.13 & run update 2025-05-15 21:58:37 +02:00
2814be8975 feat: switch to Uv 2025-05-15 21:58:37 +02:00
5c272b5110 run update 2024-07-21 14:48:32 +02:00
380d6ff186 fix: support new user ratings page markup
We use __NEXT_DATA__ from the page to find the user's latest rated
movies.
We found that at least in one case (of a Video Game) the movie details
were wrong. Normally this shouldn't be a problem though because we know
all the movies already and we keep the values we already have. Otherwise
the data from __NEXT_DATA__ seems more accurate and complete.
2024-07-21 14:46:45 +02:00
d7530e6bb0 fix: support new "most popular 100" & "bottom 100" HTML
The previous version had all 100 movies rendered into the HTML.  The new
version has only the top 25 rendered into HTML, but the whole list has
been made available as LD+JSON data.
Since we can easily support both, we don't (yet) remove the old parser.
2024-07-14 16:24:59 +02:00
aaaf66c715 run update 2024-07-14 15:47:24 +02:00
0a7933bbba fix: loading multiple oscars per movie 2024-05-25 18:23:13 +02:00
02a9621734 feat: add import script for Academy awards 2024-05-25 16:44:12 +02:00
f723459333 fix: open connection before using it 2024-05-20 19:01:21 +02:00
73d5b1fd73 Merge branch 'feat/charts' 2024-05-20 17:01:10 +02:00
63f8a98dfa feat: run alembic patches at container start 2024-05-20 16:52:37 +02:00
dd39849b8d chore: typing 2024-05-19 22:49:46 +02:00
b0f5ec4cc9 feat: add awards to REST response 2024-05-19 21:42:24 +02:00
f0f69c1954 chore: add more typing info 2024-05-19 11:23:38 +02:00
76a69b6340 chore: replace TypeVar with native syntax 2024-05-19 02:57:13 +02:00
1ea09c1a45 feat: use Alembic to initialize the database
This completely removes the previous DB patching mechanism.
When this is first run for an existing installation of Unwind, depending
on its version it might lead to problems because the database's schema
won't match the code.
To avoid that issue, when upgrading Unwind to this version make sure to
STOP the old application, install this new version but DON'T start it,
instead use `alembic upgrade head` to run the outstanding patches, and
only then start the application.
2024-05-19 02:25:36 +02:00
5e4e70c9dc fix: setting the log level should only affect Unwind itself
Calling Unwind with DEBUG=1 caused a lot of debug messages from
dependencies like HTTPX, aiosqlite, etc.
2024-05-19 00:15:30 +02:00
22c44bfa60 chore: move existing CLI commands into separate files 2024-05-19 00:12:06 +02:00
f7fc84c050 feat: add CLI command to load IMDb charts
This introduces a generalized module interface for CLI commands.
2024-05-18 23:46:56 +02:00
1789b2ce45 fix: encode query params for GQL request 2024-05-18 23:38:33 +02:00
0747ca5658 fix: always use named constraints in SQLAlchemy 2024-05-18 23:35:07 +02:00
f102e07256 feat: add a table to store award information 2024-05-18 23:32:10 +02:00
5eb7211b59 fix: SQL integer column types
We used NUMBER[sic!] as column type in our SQL, which does not exist.
The way SQLite works this mapped to NUMERIC, which is not what we meant,
we really wanted INTEGER here.
2024-05-18 18:54:46 +02:00
feb60bf658 feat: add Alembic
This adds the `ratings_index` to code, which so far wasn't reflected in
SQLAlchemy's metadata.
2024-05-18 18:54:46 +02:00
62 changed files with 2735 additions and 2306 deletions

View file

@ -1 +1 @@
3.12 3.13

View file

@ -13,20 +13,24 @@ WORKDIR /var/app
COPY build/requirements.txt ./ COPY build/requirements.txt ./
RUN pip install --no-cache-dir --upgrade \ RUN pip install --no-cache-dir \
--require-hashes \
--requirement requirements.txt --requirement requirements.txt
USER 10000:10001 USER 10000:10001
COPY run ./ COPY alembic.ini entrypoint.sh pyproject.toml run ./
COPY alembic ./alembic
COPY scripts ./scripts COPY scripts ./scripts
COPY unwind ./unwind COPY unwind ./unwind
RUN pip install --no-cache-dir --editable .
ENV UNWIND_DATA="/data" ENV UNWIND_DATA="/data"
VOLUME $UNWIND_DATA VOLUME $UNWIND_DATA
ENV UNWIND_PORT=8097 ENV UNWIND_PORT=8097
EXPOSE $UNWIND_PORT EXPOSE $UNWIND_PORT
ENTRYPOINT ["/var/app/run"] ENTRYPOINT ["/var/app/entrypoint.sh"]
CMD ["server"] CMD ["server"]

39
alembic.ini Normal file
View file

@ -0,0 +1,39 @@
[alembic]
# Directory that holds env.py and the versions/ migration scripts.
script_location = alembic
# Name generated revision files "<epoch>-<rev>_<slug>" so they sort by age.
file_template = %%(epoch)s-%%(rev)s_%%(slug)s
# Render "Create Date" timestamps in UTC rather than local time.
timezone = UTC

# Logging configuration (standard logging.config fileConfig format,
# consumed by env.py at startup).
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
# Raise to INFO to log every emitted SQL statement.
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

108
alembic/env.py Normal file
View file

@ -0,0 +1,108 @@
import asyncio
from logging.config import fileConfig
import sqlalchemy as sa
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
from unwind import db, models
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically (root, sqlalchemy, alembic — see
# the [loggers] section of alembic.ini).
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
def is_different_type(
    context,
    inspected_column: sa.Column,
    metadata_column: sa.Column,
    inspected_type: sa.types.TypeEngine,
    metadata_type: sa.types.TypeEngine,
) -> bool | None:
    """Custom ``compare_type`` hook for Alembic autogenerate.

    Our hand-written SQL used "TEXT", which in SQLite is the same as
    VARCHAR but looks like a different type to SQLAlchemy/Alembic.
    Report such equivalent pairs as "not different"; for everything
    else return None to defer to the default comparison.
    """
    equivalent_groups = ((sa.TEXT, sa.String),)
    same = any(
        isinstance(inspected_type, group) and isinstance(metadata_type, group)
        for group in equivalent_groups
    )
    if same:
        return False
    return None  # defer to default compare implementation
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and no Engine, so no DBAPI
    needs to be available.  Calls to context.execute() emit the given
    string to the script output instead of hitting a database.
    """
    offline_options = {
        "url": db._connection_uri(),
        "target_metadata": models.metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
        "compare_type": is_different_type,
        "render_as_batch": True,
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Configure the Alembic context on *connection* and run migrations."""
    online_options = {
        "connection": connection,
        "target_metadata": models.metadata,
        "compare_type": is_different_type,
        "render_as_batch": True,
    }
    context.configure(**online_options)

    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Create an async Engine and run migrations through one of its connections.

    Fix: the engine is now disposed in a ``finally`` block, so its pooled
    resources are released even when connecting or a migration raises;
    previously an exception skipped ``dispose()`` entirely.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        url=db._connection_uri(),
        poolclass=pool.NullPool,
    )

    try:
        async with connectable.connect() as connection:
            # Alembic's migration machinery is sync; bridge it onto the
            # async connection.
            await connection.run_sync(do_run_migrations)
    finally:
        await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode (against a live database)."""
    # Another script may hand us an already-open (sync) connection via
    # config.attributes; reuse it instead of creating our own engine.
    existing = config.attributes.get("connection")
    if existing and isinstance(existing, sa.Connection):
        do_run_migrations(existing)
    else:
        asyncio.run(run_async_migrations())


# Module entry point: Alembic imports this file and we dispatch on mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
alembic/script.py.mako Normal file
View file

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: str | None = ${repr(down_revision)}
branch_labels: str | Sequence[str] | None = ${repr(branch_labels)}
depends_on: str | Sequence[str] | None = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

@ -0,0 +1,69 @@
"""fix data types
Revision ID: c08ae04dc482
Revises:
Create Date: 2024-05-18 16:24:31.152480+00:00
"""
from typing import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "c08ae04dc482"
# First revision in the chain — no predecessor.
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Convert the mistyped NUMERIC rating columns to INTEGER."""
    # (column name, existing_nullable) for each affected column, in the
    # same order the auto-generated version altered them.
    columns = (("score", False), ("favorite", True), ("finished", True))
    with op.batch_alter_table("ratings", schema=None) as batch_op:
        for name, nullable in columns:
            batch_op.alter_column(
                name,
                existing_type=sa.NUMERIC(),
                type_=sa.Integer(),
                existing_nullable=nullable,
            )
def downgrade() -> None:
    """Revert the rating columns back to NUMERIC (reverse order of upgrade)."""
    columns = (("finished", True), ("favorite", True), ("score", False))
    with op.batch_alter_table("ratings", schema=None) as batch_op:
        for name, nullable in columns:
            batch_op.alter_column(
                name,
                existing_type=sa.Integer(),
                type_=sa.NUMERIC(),
                existing_nullable=nullable,
            )

View file

@ -0,0 +1,44 @@
"""add awards table
Revision ID: 62882ef5e3ff
Revises: c08ae04dc482
Create Date: 2024-05-18 16:35:10.145964+00:00
"""
from typing import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "62882ef5e3ff"
# Follows the "fix data types" revision.
down_revision: str | None = "c08ae04dc482"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Create the ``awards`` table, keyed by id with a FK to ``movies``."""
    # Every column in this table is a non-nullable string.
    string_columns = ("id", "movie_id", "category", "details", "created", "updated")
    op.create_table(
        "awards",
        *(sa.Column(name, sa.String(), nullable=False) for name in string_columns),
        sa.ForeignKeyConstraint(["movie_id"], ["movies.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
def downgrade() -> None:
    """Drop the ``awards`` table again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("awards")
    # ### end Alembic commands ###

View file

@ -0,0 +1,41 @@
"""use named constraints
See https://alembic.sqlalchemy.org/en/latest/naming.html
Revision ID: f17c7ca9afa4
Revises: 62882ef5e3ff
Create Date: 2024-05-18 17:06:27.696713+00:00
"""
from typing import Sequence
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "f17c7ca9afa4"
# Follows the "add awards table" revision.
down_revision: str | None = "62882ef5e3ff"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Add named UNIQUE constraints on each table's ``imdb_id`` column."""
    for table in ("movies", "users"):
        with op.batch_alter_table(table) as batch_op:
            batch_op.create_unique_constraint(
                batch_op.f(f"uq_{table}_imdb_id"), ["imdb_id"]
            )
def downgrade() -> None:
    """Drop the named UNIQUE constraints again (reverse order of upgrade)."""
    for table in ("users", "movies"):
        with op.batch_alter_table(table, schema=None) as batch_op:
            batch_op.drop_constraint(
                batch_op.f(f"uq_{table}_imdb_id"), type_="unique"
            )

View file

@ -0,0 +1,38 @@
"""remove db_patches table
We replace our old patch process with Alembic's.
Revision ID: 8b06e4916840
Revises: f17c7ca9afa4
Create Date: 2024-05-19 00:11:06.730421+00:00
"""
from typing import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "8b06e4916840"
# Follows the "use named constraints" revision.
down_revision: str | None = "f17c7ca9afa4"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Drop the legacy ``db_patches`` table; Alembic replaces that mechanism."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("db_patches")
    # ### end Alembic commands ###
def downgrade() -> None:
    """Recreate the legacy ``db_patches`` bookkeeping table (empty)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "db_patches",
        sa.Column("id", sa.INTEGER(), nullable=False),
        sa.Column("current", sa.VARCHAR(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###

4
entrypoint.sh Executable file
View file

@ -0,0 +1,4 @@
#!/bin/sh -eu
# Container entrypoint: apply any outstanding database migrations first,
# then replace this shell with the application launcher, forwarding the
# container CMD arguments (e.g. "server").
alembic upgrade head
exec ./run "$@"

697
poetry.lock generated
View file

@ -1,697 +0,0 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "aiosqlite"
version = "0.20.0"
description = "asyncio bridge to the standard sqlite3 module"
optional = false
python-versions = ">=3.8"
files = [
{file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"},
{file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"},
]
[package.dependencies]
typing_extensions = ">=4.0"
[package.extras]
dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"]
docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"]
[[package]]
name = "anyio"
version = "4.3.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
{file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
{file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
]
[package.dependencies]
idna = ">=2.8"
sniffio = ">=1.1"
[package.extras]
doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"]
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
description = "Screen-scraping library"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
{file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
]
[package.dependencies]
soupsieve = ">1.2"
[package.extras]
cchardet = ["cchardet"]
chardet = ["chardet"]
charset-normalizer = ["charset-normalizer"]
html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]]
name = "certifi"
version = "2024.2.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]
[[package]]
name = "click"
version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "7.5.1"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"},
{file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"},
{file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"},
{file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"},
{file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"},
{file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"},
{file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"},
{file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"},
{file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"},
{file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"},
{file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"},
{file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"},
{file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"},
{file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"},
{file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"},
{file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"},
{file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"},
{file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"},
{file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"},
{file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"},
{file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"},
{file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"},
{file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"},
{file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"},
{file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"},
{file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"},
{file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"},
{file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"},
{file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"},
{file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"},
{file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"},
{file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"},
{file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"},
{file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"},
{file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"},
{file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"},
{file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"},
{file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"},
{file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"},
{file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"},
{file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"},
{file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"},
{file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"},
{file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"},
{file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"},
{file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"},
{file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"},
{file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"},
{file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"},
{file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"},
{file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"},
{file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"},
]
[package.extras]
toml = ["tomli"]
[[package]]
name = "greenlet"
version = "3.0.3"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
files = [
{file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"},
{file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"},
{file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"},
{file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"},
{file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"},
{file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"},
{file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"},
{file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"},
{file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"},
{file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"},
{file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"},
{file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"},
{file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"},
{file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"},
{file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"},
{file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"},
{file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"},
{file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"},
{file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"},
{file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"},
{file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"},
{file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"},
{file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"},
{file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"},
{file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"},
{file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"},
{file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"},
{file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"},
{file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"},
{file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"},
{file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"},
{file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"},
{file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"},
{file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"},
{file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"},
{file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"},
{file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"},
{file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"},
{file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"},
{file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"},
{file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"},
{file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"},
{file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"},
{file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"},
{file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"},
{file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"},
{file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"},
{file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"},
{file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"},
{file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"},
{file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"},
{file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"},
{file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
]
[package.extras]
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[[package]]
name = "honcho"
version = "1.1.0"
description = "Honcho: a Python clone of Foreman. For managing Procfile-based applications."
optional = false
python-versions = "*"
files = [
{file = "honcho-1.1.0-py2.py3-none-any.whl", hash = "sha256:a4d6e3a88a7b51b66351ecfc6e9d79d8f4b87351db9ad7e923f5632cc498122f"},
{file = "honcho-1.1.0.tar.gz", hash = "sha256:c5eca0bded4bef6697a23aec0422fd4f6508ea3581979a3485fc4b89357eb2a9"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
export = ["jinja2 (>=2.7,<3)"]
[[package]]
name = "html5lib"
version = "1.1"
description = "HTML parser based on the WHATWG HTML specification"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"},
{file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"},
]
[package.dependencies]
six = ">=1.9"
webencodings = "*"
[package.extras]
all = ["chardet (>=2.2)", "genshi", "lxml"]
chardet = ["chardet (>=2.2)"]
genshi = ["genshi"]
lxml = ["lxml"]
[[package]]
name = "httpcore"
version = "1.0.5"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
files = [
{file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
{file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
]
[package.dependencies]
certifi = "*"
h11 = ">=0.13,<0.15"
[package.extras]
asyncio = ["anyio (>=4.0,<5.0)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
trio = ["trio (>=0.22.0,<0.26.0)"]
[[package]]
name = "httpx"
version = "0.27.0"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
files = [
{file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
{file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
]
[package.dependencies]
anyio = "*"
certifi = "*"
httpcore = "==1.*"
idna = "*"
sniffio = "*"
[package.extras]
brotli = ["brotli", "brotlicffi"]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
[[package]]
name = "idna"
version = "3.7"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "nodeenv"
version = "1.8.0"
description = "Node.js virtual environment builder"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
]
[package.dependencies]
setuptools = "*"
[[package]]
name = "packaging"
version = "24.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
{file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
]
[[package]]
name = "pluggy"
version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pyright"
version = "1.1.362"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.362-py3-none-any.whl", hash = "sha256:969957cff45154d8a45a4ab1dae5bdc8223d8bd3c64654fa608ab3194dfff319"},
{file = "pyright-1.1.362.tar.gz", hash = "sha256:6a477e448d4a07a6a0eab58b2a15a1bbed031eb3169fa809edee79cca168d83a"},
]
[package.dependencies]
nodeenv = ">=1.6.0"
[package.extras]
all = ["twine (>=3.4.1)"]
dev = ["twine (>=3.4.1)"]
[[package]]
name = "pytest"
version = "8.2.0"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"},
{file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2.0"
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.23.6"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"},
{file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"},
]
[package.dependencies]
pytest = ">=7.0.0,<9"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "pytest-cov"
version = "5.0.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
{file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "ruff"
version = "0.4.3"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.4.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b70800c290f14ae6fcbb41bbe201cf62dfca024d124a1f373e76371a007454ce"},
{file = "ruff-0.4.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:08a0d6a22918ab2552ace96adeaca308833873a4d7d1d587bb1d37bae8728eb3"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba1f14df3c758dd7de5b55fbae7e1c8af238597961e5fb628f3de446c3c40c5"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:819fb06d535cc76dfddbfe8d3068ff602ddeb40e3eacbc90e0d1272bb8d97113"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bfc9e955e6dc6359eb6f82ea150c4f4e82b660e5b58d9a20a0e42ec3bb6342b"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:510a67d232d2ebe983fddea324dbf9d69b71c4d2dfeb8a862f4a127536dd4cfb"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9ff11cd9a092ee7680a56d21f302bdda14327772cd870d806610a3503d001f"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29efff25bf9ee685c2c8390563a5b5c006a3fee5230d28ea39f4f75f9d0b6f2f"},
{file = "ruff-0.4.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b00e0bcccf0fc8d7186ed21e311dffd19761cb632241a6e4fe4477cc80ef6e"},
{file = "ruff-0.4.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:262f5635e2c74d80b7507fbc2fac28fe0d4fef26373bbc62039526f7722bca1b"},
{file = "ruff-0.4.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7363691198719c26459e08cc17c6a3dac6f592e9ea3d2fa772f4e561b5fe82a3"},
{file = "ruff-0.4.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eeb039f8428fcb6725bb63cbae92ad67b0559e68b5d80f840f11914afd8ddf7f"},
{file = "ruff-0.4.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:927b11c1e4d0727ce1a729eace61cee88a334623ec424c0b1c8fe3e5f9d3c865"},
{file = "ruff-0.4.3-py3-none-win32.whl", hash = "sha256:25cacda2155778beb0d064e0ec5a3944dcca9c12715f7c4634fd9d93ac33fd30"},
{file = "ruff-0.4.3-py3-none-win_amd64.whl", hash = "sha256:7a1c3a450bc6539ef00da6c819fb1b76b6b065dec585f91456e7c0d6a0bbc725"},
{file = "ruff-0.4.3-py3-none-win_arm64.whl", hash = "sha256:71ca5f8ccf1121b95a59649482470c5601c60a416bf189d553955b0338e34614"},
{file = "ruff-0.4.3.tar.gz", hash = "sha256:ff0a3ef2e3c4b6d133fbedcf9586abfbe38d076041f2dc18ffb2c7e0485d5a07"},
]
[[package]]
name = "setuptools"
version = "69.5.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
{file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "sniffio"
version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
]
[[package]]
name = "soupsieve"
version = "2.5"
description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.8"
files = [
{file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
]
[[package]]
name = "sqlalchemy"
version = "2.0.30"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"},
{file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"},
{file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"},
{file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"},
{file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"},
{file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"},
{file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"},
{file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"},
{file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"},
]
[package.dependencies]
aiosqlite = {version = "*", optional = true, markers = "extra == \"aiosqlite\""}
greenlet = {version = "!=0.4.17", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"aiosqlite\""}
typing-extensions = {version = ">=4.6.0", optional = true, markers = "extra == \"aiosqlite\""}
[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=8)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "starlette"
version = "0.37.2"
description = "The little ASGI library that shines."
optional = false
python-versions = ">=3.8"
files = [
{file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
{file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
]
[package.dependencies]
anyio = ">=3.4.0,<5"
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
[[package]]
name = "typing-extensions"
version = "4.11.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
{file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
]
[[package]]
name = "ulid-py"
version = "1.1.0"
description = "Universally Unique Lexicographically Sortable Identifier"
optional = false
python-versions = "*"
files = [
{file = "ulid-py-1.1.0.tar.gz", hash = "sha256:dc6884be91558df077c3011b9fb0c87d1097cb8fc6534b11f310161afd5738f0"},
{file = "ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987"},
]
[[package]]
name = "uvicorn"
version = "0.29.0"
description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.8"
files = [
{file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
{file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
]
[package.dependencies]
click = ">=7.0"
h11 = ">=0.8"
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "webencodings"
version = "0.5.1"
description = "Character encoding aliases for legacy web content"
optional = false
python-versions = "*"
files = [
{file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
{file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
]
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "038fed338d6b75c17eb8eb88d36c2411ff936dab23887b70594e5ba1da518451"

View file

@ -1,48 +1,37 @@
[project] [project]
name = "unwind" name = "unwind"
requires-python = ">=3.12"
[tool.poetry]
name = "unwind"
version = "0" version = "0"
description = "" description = ""
authors = ["ducklet <ducklet@noreply.code.dumpr.org>"] authors = [{ name = "ducklet", email = "ducklet@noreply.code.dumpr.org" }]
license = "LOL" requires-python = ">=3.13"
license = "blessing"
dependencies = [
"beautifulsoup4>=4.9.3,<5",
"html5lib~=1.1",
"starlette>=0.46.2",
"ulid-py>=1.1.0,<2",
"uvicorn>=0.34.2",
"httpx>=0.28.1",
"sqlalchemy[aiosqlite]~=2.0",
"alembic>=1.13.1,<2",
]
[tool.poetry.dependencies] [dependency-groups]
python = "^3.12" dev = [
beautifulsoup4 = "^4.9.3" "pytest",
html5lib = "^1.1" "pyright",
starlette = "^0.37.2" "pytest-asyncio",
ulid-py = "^1.1.0" "pytest-cov",
uvicorn = "^0.29.0" "ruff",
httpx = "^0.27.0" "honcho",
sqlalchemy = {version = "^2.0", extras = ["aiosqlite"]} ]
[tool.poetry.group.build.dependencies]
# When we run poetry export, typing-extensions is a transient dependency via
# sqlalchemy, but the hash won't be included in the requirements.txt.
# By making it a direct dependency we can fix this issue, otherwise this could
# be removed.
typing-extensions = "*"
[tool.poetry.group.dev]
optional = true
[tool.poetry.group.dev.dependencies]
pytest = "*"
pyright = "*"
pytest-asyncio = "*"
pytest-cov = "*"
ruff = "*"
honcho = "*"
[build-system] [build-system]
requires = ["poetry-core>=1.0.0"] requires = ["hatchling"]
build-backend = "poetry.core.masonry.api" build-backend = "hatchling.build"
[tool.pyright] [tool.pyright]
pythonVersion = "3.12" pythonVersion = "3.13"
[tool.ruff.lint] [tool.ruff.lint]
select = [ select = [

View file

@ -4,4 +4,6 @@ cd "$RUN_DIR"
[ -z "${DEBUG:-}" ] || set -x [ -z "${DEBUG:-}" ] || set -x
exec poetry run python -m unwind "$@" python_bin=$(uv run command -v python)
exec "$python_bin" -m unwind "$@"

View file

@ -33,9 +33,12 @@ githash_long=$(git rev-parse HEAD)
version="$githash_short" version="$githash_short"
echo "$version" >"$builddir"/version echo "$version" >"$builddir"/version
poetry export \ uv export \
--with=build \ --frozen \
--output="$builddir"/requirements.txt --format=requirements.txt \
--no-dev \
--no-emit-project \
>"$builddir"/requirements.txt
$DOCKER_BIN build \ $DOCKER_BIN build \
--pull \ --pull \

View file

@ -4,7 +4,7 @@ cd "$RUN_DIR"
[ -z "${DEBUG:-}" ] || set -x [ -z "${DEBUG:-}" ] || set -x
poetry install --with=dev --sync uv sync --frozen
cd unwind-ui cd unwind-ui
npm ci npm ci

View file

@ -4,7 +4,7 @@ cd "$RUN_DIR"
[ -z "${DEBUG:-}" ] || set -x [ -z "${DEBUG:-}" ] || set -x
poetry run ruff check --fix . ||: uv run ruff check --fix . ||:
poetry run ruff format . uv run ruff format .
poetry run pyright uv run pyright

View file

@ -4,8 +4,10 @@ cd "$RUN_DIR"
[ -z "${DEBUG:-}" ] || set -x [ -z "${DEBUG:-}" ] || set -x
echo '# Poetry:' echo '# Uv:'
poetry show --outdated --top-level --with=build,dev uv tree --outdated \
| grep --color=never ' (latest: ' \
| sed -E 's/^[│├└─ ]*//'
echo ' echo '
# Npm:' # Npm:'

View file

@ -11,4 +11,4 @@ trap 'rm "$dbfile" "${dbfile}-shm" "${dbfile}-wal"' EXIT TERM INT QUIT
[ -z "${DEBUG:-}" ] || set -x [ -z "${DEBUG:-}" ] || set -x
UNWIND_STORAGE="$dbfile" \ UNWIND_STORAGE="$dbfile" \
exec poetry run pytest --cov "$@" exec uv run pytest --cov "$@"

View file

@ -1,4 +0,0 @@
#!/bin/sh -eu
cd "$RUN_DIR"
exec "$RUN_BIN" tests -x --cov-report html:cov_html

View file

@ -13,34 +13,23 @@ cd "$RUN_DIR"
[ -z "${DEBUG:-}" ] || set -x [ -z "${DEBUG:-}" ] || set -x
# Poetry # Uv
poetry update --with=build,dev uv sync --upgrade
poetry show --outdated --top-level \ uv tree --outdated --no-dev \
| cut -d ' ' -f 1 \ | sed -nE 's/.*─ ([^ ]+) (v[^ ]+) \(latest: v([^)]+)\)/\1>=\3/p' \
| while read -r pkg; do | xargs uv add
poetry add "$pkg@latest"
done
poetry show --outdated --top-level --only=build \ uv tree --outdated --only-dev \
| cut -d ' ' -f 1 \ | sed -nE 's/.*─ ([^ ]+) (v[^ ]+) \(latest: v([^)]+)\)/\1>=\3/p' \
| while read -r pkg; do | xargs uv add --dev
poetry add --group=build "$pkg@latest"
done
poetry show --outdated --top-level --only=dev \
| cut -d ' ' -f 1 \
| while read -r pkg; do
poetry add --group=dev "$pkg@latest"
done
# Npm # Npm
cd unwind-ui cd unwind-ui
npm update npm update
npm install $(npm outdated --json --silent | jq -r 'keys|map("\(.)@latest")|@sh')
npm outdated --json --silent \ npm outdated --json --silent \
| jq -r 'keys|map(@sh"\(.)@latest")|join("\n")' \ | jq -r 'keys|map(@sh"\(.)@latest")|join("\n")' \

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -20,14 +20,6 @@ def a_movie(**kwds) -> models.Movie:
return models.Movie(**args) return models.Movie(**args)
@pytest.mark.asyncio
async def test_current_patch_level(conn: db.Connection):
patch_level = "some-patch-level"
assert patch_level != await db.current_patch_level(conn)
await db.set_current_patch_level(conn, patch_level)
assert patch_level == await db.current_patch_level(conn)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get(conn: db.Connection): async def test_get(conn: db.Connection):
m1 = a_movie() m1 = a_movie()

View file

@ -30,29 +30,43 @@ def test_score_conversion(score: int):
assert score == score_from_imdb_rating(imdb_rating_from_score(score)) assert score == score_from_imdb_rating(imdb_rating_from_score(score))
@pytest.mark.parametrize(
"fixture",
(
("most_popular_100.html.bz2"),
("most_popular_100-20240714.html.bz2"),
),
)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_load_most_popular_100(monkeypatch): async def test_load_most_popular_100(monkeypatch, fixture: str):
with bz2.open(fixturesdir / "most_popular_100.html.bz2", "rb") as f: with bz2.open(fixturesdir / fixture, "rb") as f:
html = f.read() html = f.read()
soup = bs4.BeautifulSoup(html, "html5lib") soup = bs4.BeautifulSoup(html, "html5lib")
monkeypatch.setattr(imdb, "asoup_from_url", AsyncMock(return_value=soup)) monkeypatch.setattr(imdb, "asoup_from_url", AsyncMock(return_value=soup))
movie_ids = await imdb.load_most_popular_100() movie_ids = await imdb.load_most_popular_100()
assert len(movie_ids) == 100 assert len(set(movie_ids)) == 100
assert all(id_.startswith("tt") for id_ in movie_ids) assert all(id_.startswith("tt") for id_ in movie_ids)
@pytest.mark.parametrize(
"fixture",
(
("bottom_100.html.bz2"),
("bottom_100-20240714.html.bz2"),
),
)
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_load_bottom_100(monkeypatch): async def test_load_bottom_100(monkeypatch, fixture: str):
with bz2.open(fixturesdir / "bottom_100.html.bz2", "rb") as f: with bz2.open(fixturesdir / fixture, "rb") as f:
html = f.read() html = f.read()
soup = bs4.BeautifulSoup(html, "html5lib") soup = bs4.BeautifulSoup(html, "html5lib")
monkeypatch.setattr(imdb, "asoup_from_url", AsyncMock(return_value=soup)) monkeypatch.setattr(imdb, "asoup_from_url", AsyncMock(return_value=soup))
movie_ids = await imdb.load_bottom_100() movie_ids = await imdb.load_bottom_100()
assert len(movie_ids) == 100 assert len(set(movie_ids)) == 100
assert all(id_.startswith("tt") for id_ in movie_ids) assert all(id_.startswith("tt") for id_ in movie_ids)
@ -120,7 +134,7 @@ async def test_load_ratings_page_20240510(monkeypatch):
if rating.movie.imdb_id == item["imdb_id"]: if rating.movie.imdb_id == item["imdb_id"]:
rating_dict = {key: getattr(rating.movie, key) for key in item.keys()} rating_dict = {key: getattr(rating.movie, key) for key in item.keys()}
return rating_dict return rating_dict
raise AssertionError() raise AssertionError(f"{item['imdb_id']} not found in page.ratings")
a_movie = { a_movie = {
"title": "Kung Fu Panda 4", "title": "Kung Fu Panda 4",
@ -128,8 +142,9 @@ async def test_load_ratings_page_20240510(monkeypatch):
"media_type": "Movie", "media_type": "Movie",
"imdb_id": "tt21692408", "imdb_id": "tt21692408",
"imdb_score": 59, "imdb_score": 59,
"imdb_votes": 36000, "imdb_votes": 36069,
"runtime": 94, "runtime": 94,
"genres": {"Action", "Adventure", "Animation"},
} }
assert a_movie == movie(a_movie) assert a_movie == movie(a_movie)
@ -139,7 +154,8 @@ async def test_load_ratings_page_20240510(monkeypatch):
"media_type": "TV Series", "media_type": "TV Series",
"imdb_id": "tt8888540", "imdb_id": "tt8888540",
"imdb_score": 64, "imdb_score": 64,
"imdb_votes": 6000, "imdb_votes": 6044,
"genres": {"Drama"},
} }
assert a_running_tvseries == movie(a_running_tvseries) assert a_running_tvseries == movie(a_running_tvseries)
@ -149,29 +165,94 @@ async def test_load_ratings_page_20240510(monkeypatch):
"media_type": "TV Series", "media_type": "TV Series",
"imdb_id": "tt0072500", "imdb_id": "tt0072500",
"imdb_score": 87, "imdb_score": 87,
"imdb_votes": 100000, "imdb_votes": 100261,
"genres": {"Comedy"},
} }
assert a_finished_tvseries == movie(a_finished_tvseries) assert a_finished_tvseries == movie(a_finished_tvseries)
a_tvepisode = { a_tvepisode = {
"title": "Columbo / No Time to Die", "title": "Columbo / No Time to Die",
"original_title": None, "original_title": "Columbo / No Time to Die",
"release_year": 1992, "release_year": 1992,
"media_type": "TV Episode", "media_type": "TV Episode",
"imdb_id": "tt0103987", "imdb_id": "tt0103987",
"imdb_score": 59, "imdb_score": 59,
"imdb_votes": 2100, "imdb_votes": 2122,
"runtime": 98, "runtime": 98,
"genres": {"Crime", "Drama", "Mystery"},
} }
assert a_tvepisode == movie(a_tvepisode) assert a_tvepisode == movie(a_tvepisode)
a_videogame = { a_videogame = {
"title": "Alan Wake", "title": "Alan Wake",
"original_title": None, "original_title": "Alan Wake",
"release_year": 2010, "release_year": 2010,
"media_type": "Video Game", "media_type": "Video Game",
"imdb_id": "tt0466662", "imdb_id": "tt0466662",
"imdb_score": 82, # The data from __NEXT_DATA__ is wrong, the actual values should be:
"imdb_votes": 7300, # "imdb_score": 82,
# "imdb_votes": 7300,
# "genres": {"Action", "Adventure", "Horror"},
"imdb_score": 67, # Wrong value, but correctly parsed from __NEXT_DATA__
"imdb_votes": 11655, # Wrong value, but correctly parsed from __NEXT_DATA__
"genres": {"Comedy", "Crime", "Drama"}, # Wrong value
} }
assert a_videogame == movie(a_videogame) assert a_videogame == movie(a_videogame)
@pytest.mark.asyncio
async def test_load_ratings_page_20240720(monkeypatch):
    """Parse the post-2024-07-20 ratings page (HTML fixture + GraphQL fixture).

    The new page markup only renders part of the list into HTML; the loader
    additionally POSTs a GraphQL request, which is mocked here from a second
    recorded fixture.
    """
    # The user's ratings page, served from a recorded fixture.
    with bz2.open(fixturesdir / "ratings-ur655321-20240720.html.bz2", "rb") as f:
        html = f.read()
    soup = bs4.BeautifulSoup(html, "html5lib")
    monkeypatch.setattr(imdb, "asoup_from_url", AsyncMock(return_value=soup))
    # The recorded GraphQL response body for the loader's POST.
    with bz2.open(fixturesdir / "ratings-ur655321-20240720.gql.json.bz2", "rb") as f:
        jsonstr = f.read()
    # NOTE(review): patching `post` on this session only works if
    # _load_ratings_page uses the same (shared/cached) session — TODO confirm.
    async with imdb.asession() as s:
        monkeypatch.setattr(s, "post", AsyncMock(return_value=_mock_response(jsonstr)))
        page = await imdb._load_ratings_page("fakeurl", "ur655321")
    assert len(page.ratings) == 100
    assert page.imdb_user_id is not None
    assert page.imdb_user_id == "ur655321"
    assert page.imdb_user_name == "AlexUltra"
    assert page.next_page_url is None, "not supported for new ratings page"

    def movie(item: dict):
        # Find the rated movie with item's IMDb ID and project it onto the
        # keys of `item`, so a plain dict comparison checks only those fields.
        for rating in page.ratings:
            assert rating.movie
            if rating.movie.imdb_id == item["imdb_id"]:
                rating_dict = {key: getattr(rating.movie, key) for key in item.keys()}
                return rating_dict
        raise AssertionError(f"{item['imdb_id']} not found in page.ratings")

    # One representative entry per media type present in the fixture.
    a_movie = {
        "title": "Kung Fu Panda 4",
        "release_year": 2024,
        "media_type": "Movie",
        "imdb_id": "tt21692408",
        "imdb_score": 59,
        "imdb_votes": 48018,
        "runtime": 94,
    }
    assert a_movie == movie(a_movie)
    a_running_tvseries = {
        "title": "Palm Royale",
        "release_year": 2024,
        "media_type": "TV Series",
        "imdb_id": "tt8888540",
        "imdb_score": 63,
        "imdb_votes": 9458,
    }
    assert a_running_tvseries == movie(a_running_tvseries)
    a_finished_tvseries = {
        "title": "Fawlty Towers",
        "release_year": 1975,
        "media_type": "TV Series",
        "imdb_id": "tt0072500",
        "imdb_score": 87,
        "imdb_votes": 100860,
    }
    assert a_finished_tvseries == movie(a_finished_tvseries)

View file

@ -32,6 +32,83 @@ def admin_client() -> TestClient:
return client return client
@pytest.mark.asyncio
async def test_get_ratings_for_group_with_awards(
    conn: db.Connection, unauthorized_client: TestClient
):
    """The group-ratings endpoint serializes a movie's awards.

    Awards are expected as "category:detail" strings (chart position for
    chart categories, award name for oscars); the expected order below is
    consistent with a lexicographic sort — TODO confirm in the endpoint.
    """
    # One user in one group readable by that user.
    user = models.User(
        imdb_id="ur12345678",
        name="user-1",
        secret="secret-1",  # noqa: S106
        groups=[],
    )
    group = models.Group(
        name="group-1",
        users=[models.GroupUser(id=str(user.id), name=user.name)],
    )
    user.groups = [models.UserGroup(id=str(group.id), access="r")]
    path = app.url_path_for("get_ratings_for_group", group_id=str(group.id))
    await db.add(conn, user)
    await db.add(conn, group)
    # movie1 carries all the awards and the user's rating; movie2 exists only
    # to prove that its award does not leak into the response.
    movie1 = models.Movie(
        title="test movie",
        release_year=2013,
        media_type="Movie",
        imdb_id="tt12345678",
        genres={"genre-1"},
    )
    await db.add(conn, movie1)
    movie2 = models.Movie(
        title="test movie 2",
        release_year=2014,
        media_type="Movie",
        imdb_id="tt12345679",
        genres={"genre-2"},
    )
    await db.add(conn, movie2)
    award1 = models.Award(
        movie_id=movie1.id, category="imdb-top-250", details='{"position":23}'
    )
    award2 = models.Award(
        movie_id=movie2.id, category="imdb-top-250", details='{"position":99}'
    )
    # Two oscars on the same movie: both must show up in the response.
    award3 = models.Award(
        movie_id=movie1.id, category="oscars", details='{"name":"Best Visual Effects"}'
    )
    award4 = models.Award(
        movie_id=movie1.id, category="oscars", details='{"name":"Best Picture"}'
    )
    await db.add(conn, award1, award2, award3, award4)
    rating = models.Rating(
        movie_id=movie1.id, user_id=user.id, score=66, rating_date=datetime.now(tz=UTC)
    )
    await db.add(conn, rating)
    rating_aggregate = {
        "canonical_title": movie1.title,
        "imdb_score": movie1.imdb_score,
        "imdb_votes": movie1.imdb_votes,
        "link": imdb.movie_url(movie1.imdb_id),
        "media_type": movie1.media_type,
        "original_title": movie1.original_title,
        "user_scores": [rating.score],
        "year": movie1.release_year,
        "awards": [
            "imdb-top-250:23",
            "oscars:Best Picture",
            "oscars:Best Visual Effects",
        ],
    }
    resp = unauthorized_client.get(path)
    assert resp.status_code == 200
    # Only the rated movie1 is returned; unrated movie2 is absent.
    assert resp.json() == [rating_aggregate]
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_get_ratings_for_group( async def test_get_ratings_for_group(
conn: db.Connection, unauthorized_client: TestClient conn: db.Connection, unauthorized_client: TestClient
@ -82,6 +159,7 @@ async def test_get_ratings_for_group(
"original_title": movie.original_title, "original_title": movie.original_title,
"user_scores": [rating.score], "user_scores": [rating.score],
"year": movie.release_year, "year": movie.release_year,
"awards": [],
} }
resp = unauthorized_client.get(path) resp = unauthorized_client.get(path)
@ -158,6 +236,7 @@ async def test_list_movies(
"original_title": m.original_title, "original_title": m.original_title,
"user_scores": [], "user_scores": [],
"year": m.release_year, "year": m.release_year,
"awards": [],
} }
response = authorized_client.get(path, params={"imdb_id": m.imdb_id}) response = authorized_client.get(path, params={"imdb_id": m.imdb_id})

File diff suppressed because it is too large Load diff

View file

@ -16,7 +16,7 @@
"@vue/compiler-sfc": "^3.0.5", "@vue/compiler-sfc": "^3.0.5",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"typescript": "^5.4.5", "typescript": "^5.4.5",
"vite": "^5.2.11", "vite": "^6.3.5",
"vue-tsc": "^2.0.16" "vue-tsc": "^2.0.16"
}, },
"prettier": { "prettier": {

View file

@ -1,149 +1,26 @@
import argparse import argparse
import asyncio import asyncio
import logging import logging
import secrets import sys
from base64 import b64encode
from pathlib import Path
from . import config, db, models, utils from . import cli, config
from .db import close_connection_pool, open_connection_pool
from .imdb import refresh_user_ratings_from_imdb
from .imdb_import import download_datasets, import_from_file
log = logging.getLogger(__name__) log = logging.getLogger(__package__)
async def run_add_user(user_id: str, name: str, overwrite_existing: bool):
if not user_id.startswith("ur"):
raise ValueError(f"Invalid IMDb user ID: {user_id!a}")
await open_connection_pool()
async with db.new_connection() as conn:
user = await db.get(conn, models.User, imdb_id=user_id)
if user is not None:
if overwrite_existing:
log.warning("⚠️ Overwriting existing user: %a", user)
else:
log.error("❌ User already exists: %a", user)
return
secret = secrets.token_bytes()
user = models.User(name=name, imdb_id=user_id, secret=utils.phc_scrypt(secret))
async with db.transaction() as conn:
await db.add_or_update_user(conn, user)
user_data = {
"secret": b64encode(secret),
"user": models.asplain(user),
}
log.info("✨ User created: %a", user_data)
await close_connection_pool()
async def run_load_user_ratings_from_imdb():
await open_connection_pool()
i = 0
async for _ in refresh_user_ratings_from_imdb():
i += 1
log.info("✨ Imported %s new ratings.", i)
await close_connection_pool()
async def run_import_imdb_dataset(basics_path: Path, ratings_path: Path):
await open_connection_pool()
await import_from_file(basics_path=basics_path, ratings_path=ratings_path)
await close_connection_pool()
async def run_download_imdb_dataset(basics_path: Path, ratings_path: Path):
await download_datasets(basics_path=basics_path, ratings_path=ratings_path)
def getargs(): def getargs():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser(prog="unwind", allow_abbrev=False)
commands = parser.add_subparsers(required=True) commands = parser.add_subparsers(title="commands", metavar="COMMAND", dest="mode")
parser_import_imdb_dataset = commands.add_parser( for module in cli.modules:
"import-imdb-dataset", help_, *descr = module.help.splitlines()
help="Import IMDb datasets.", cmd = commands.add_parser(
description=""" module.name,
Import IMDb datasets. help=help_,
New datasets available from https://www.imdb.com/interfaces/. description="\n".join(descr) or help_,
""", allow_abbrev=False,
)
parser_import_imdb_dataset.add_argument(
dest="mode",
action="store_const",
const="import-imdb-dataset",
)
parser_import_imdb_dataset.add_argument(
"--basics", metavar="basics_file.tsv.gz", type=Path, required=True
)
parser_import_imdb_dataset.add_argument(
"--ratings", metavar="ratings_file.tsv.gz", type=Path, required=True
)
parser_download_imdb_dataset = commands.add_parser(
"download-imdb-dataset",
help="Download IMDb datasets.",
description="""
Download IMDb datasets.
""",
)
parser_download_imdb_dataset.add_argument(
dest="mode",
action="store_const",
const="download-imdb-dataset",
)
parser_download_imdb_dataset.add_argument(
"--basics", metavar="basics_file.tsv.gz", type=Path, required=True
)
parser_download_imdb_dataset.add_argument(
"--ratings", metavar="ratings_file.tsv.gz", type=Path, required=True
)
parser_load_user_ratings_from_imdb = commands.add_parser(
"load-user-ratings-from-imdb",
help="Load user ratings from imdb.com.",
description="""
Refresh user ratings for all registered users live from IMDb's website.
""",
)
parser_load_user_ratings_from_imdb.add_argument(
dest="mode",
action="store_const",
const="load-user-ratings-from-imdb",
)
parser_add_user = commands.add_parser(
"add-user",
help="Add a new user.",
description="""
Add a new user.
""",
)
parser_add_user.add_argument(
dest="mode",
action="store_const",
const="add-user",
)
parser_add_user.add_argument("--name", required=True)
parser_add_user.add_argument("--imdb-id", required=True)
parser_add_user.add_argument(
"--overwrite-existing",
action="store_true",
help="Allow overwriting an existing user. WARNING: This will reset the user's password!",
) )
module.add_args(cmd)
try: try:
args = parser.parse_args() args = parser.parse_args()
@ -151,6 +28,10 @@ def getargs():
parser.print_usage() parser.print_usage()
raise raise
if args.mode is None:
parser.print_help()
sys.exit(1)
return args return args
@ -158,23 +39,16 @@ def main():
logging.basicConfig( logging.basicConfig(
format="%(asctime)s.%(msecs)03d [%(name)s:%(process)d] %(levelname)s: %(message)s", format="%(asctime)s.%(msecs)03d [%(name)s:%(process)d] %(levelname)s: %(message)s",
datefmt="%H:%M:%S", datefmt="%H:%M:%S",
level=config.loglevel, # level=config.loglevel,
) )
log.setLevel(config.loglevel)
log.debug(f"Log level: {config.loglevel}") log.debug(f"Log level: {config.loglevel}")
try:
args = getargs() args = getargs()
except Exception:
return
if args.mode == "load-user-ratings-from-imdb": modes = {m.name: m.main for m in cli.modules}
asyncio.run(run_load_user_ratings_from_imdb()) if handler := modes.get(args.mode):
elif args.mode == "add-user": asyncio.run(handler(args))
asyncio.run(run_add_user(args.imdb_id, args.name, args.overwrite_existing))
elif args.mode == "import-imdb-dataset":
asyncio.run(run_import_imdb_dataset(args.basics, args.ratings))
elif args.mode == "download-imdb-dataset":
asyncio.run(run_download_imdb_dataset(args.basics, args.ratings))
main() main()

39
unwind/cli/__init__.py Normal file
View file

@ -0,0 +1,39 @@
import argparse
import importlib
from pathlib import Path
from types import ModuleType
from typing import Any, Callable, Coroutine, Iterable, Protocol, TypeGuard
type CommandHandler = Callable[[argparse.Namespace], Coroutine[Any, Any, None]]
class CliModule(Protocol):
name: str
help: str
add_args: Callable[[argparse.ArgumentParser], None]
main: CommandHandler
def _is_cli_module(m: ModuleType) -> TypeGuard[CliModule]:
return (
hasattr(m, "name")
and hasattr(m, "help")
and hasattr(m, "add_args")
and hasattr(m, "main")
)
# Directory containing the CLI command modules (this package's directory).
_clidir = Path(__file__).parent


def _load_cmds() -> Iterable[CliModule]:
    """Return all CLI command modules.

    Scans this package directory for ``*.py`` files (skipping dunder files
    such as ``__init__.py``), imports each one, and validates that it
    implements the ``CliModule`` interface.

    Raises:
        ValueError: if a discovered module does not implement the interface.
    """
    for f in _clidir.iterdir():
        if f.suffix == ".py" and not f.name.startswith("__"):
            m = importlib.import_module(f"{__package__}.{f.stem}")
            if not _is_cli_module(m):
                raise ValueError(f"Invalid CLI module: {m!a}")
            yield m


# All command modules, sorted by command name for stable subcommand listings.
modules = sorted(_load_cmds(), key=lambda m: m.name)

56
unwind/cli/add_user.py Normal file
View file

@ -0,0 +1,56 @@
import argparse
import logging
import secrets
from unwind import db, models, utils
log = logging.getLogger(__name__)
name = "add-user"
help = "Add a new user."
def add_args(cmd: argparse.ArgumentParser) -> None:
    """Register the command-line options for the ``add-user`` command."""
    cmd.add_argument("--name", required=True)
    cmd.add_argument("--imdb-id", required=True)
    cmd.add_argument(
        "--overwrite-existing",
        action="store_true",
        help=(
            "Allow overwriting an existing user. "
            "WARNING: This will reset the user's password!"
        ),
    )
async def main(args: argparse.Namespace) -> None:
    """Create a new user in the database (optionally overwriting one).

    Reads ``--imdb-id``, ``--name`` and ``--overwrite-existing`` from *args*,
    generates a random secret, stores its scrypt PHC hash on the user record,
    and logs the base64-encoded secret once so it can be handed to the user.

    Raises:
        ValueError: if the IMDb user ID does not start with "ur".
    """
    user_id: str = args.imdb_id
    name: str = args.name
    overwrite_existing: bool = args.overwrite_existing
    if not user_id.startswith("ur"):
        raise ValueError(f"Invalid IMDb user ID: {user_id!a}")
    await db.open_connection_pool()
    async with db.new_connection() as conn:
        user = await db.get(conn, models.User, imdb_id=user_id)
        if user is not None:
            if overwrite_existing:
                log.warning("⚠️ Overwriting existing user: %a", user)
            else:
                # NOTE(review): this early return skips close_connection_pool();
                # presumably acceptable for a short-lived CLI process — confirm.
                log.error("❌ User already exists: %a", user)
                return
    # The raw secret is only kept in memory; the DB stores its scrypt hash.
    secret = secrets.token_bytes()
    user = models.User(name=name, imdb_id=user_id, secret=utils.phc_scrypt(secret))
    async with db.transaction() as conn:
        await db.add_or_update_user(conn, user)
    user_data = {
        "secret": utils.b64encode(secret),
        "user": models.asplain(user),
    }
    log.info("✨ User created: %a", user_data)
    await db.close_connection_pool()

View file

@ -0,0 +1,24 @@
import argparse
import logging
from pathlib import Path
from unwind.imdb_import import download_datasets
log = logging.getLogger(__name__)
name = "download-imdb-dataset"
help = "Download IMDb datasets."
def add_args(cmd: argparse.ArgumentParser) -> None:
    """Register the dataset file options for this command."""
    for opt, meta in (
        ("--basics", "basics_file.tsv.gz"),
        ("--ratings", "ratings_file.tsv.gz"),
    ):
        cmd.add_argument(opt, metavar=meta, type=Path, required=True)
async def main(args: argparse.Namespace) -> None:
    """Download the IMDb datasets to the paths given on the command line."""
    await download_datasets(basics_path=args.basics, ratings_path=args.ratings)

View file

@ -0,0 +1,31 @@
import argparse
import logging
from pathlib import Path
from unwind import db
from unwind.imdb_import import import_from_file
log = logging.getLogger(__name__)
name = "import-imdb-dataset"
help = """Import IMDb datasets.
New datasets available from https://www.imdb.com/interfaces/.
"""
def add_args(cmd: argparse.ArgumentParser) -> None:
    """Register the dataset file options for this command."""
    common: dict = {"type": Path, "required": True}
    cmd.add_argument("--basics", metavar="basics_file.tsv.gz", **common)
    cmd.add_argument("--ratings", metavar="ratings_file.tsv.gz", **common)
async def main(args: argparse.Namespace) -> None:
    """Import the given IMDb dataset files into the database."""
    await db.open_connection_pool()
    await import_from_file(basics_path=args.basics, ratings_path=args.ratings)
    await db.close_connection_pool()

View file

@ -0,0 +1,102 @@
import argparse
import json
import logging
from datetime import datetime
from pathlib import Path
from typing import Iterable
from unwind import db, models, types
log = logging.getLogger(__name__)
name = "import-wikidata-oscars"
help = "Import Academy awards information from a Wikidata dump."
# To generate the JSON file, run the following query
# at https://query.wikidata.org/ and export as (simple) JSON:
"""
SELECT ?awardLabel ?filmLabel ?imdbId ?time WHERE {
?award wdt:P31 wd:Q19020.
?film wdt:P31 wd:Q11424;
p:P166 ?awardStat.
?awardStat ps:P166 ?award.
OPTIONAL {
?awardStat pq:P805 ?awardEdition.
?awardEdition wdt:P585 ?time.
?film wdt:P345 ?imdbId.
}
SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". }
}
ORDER BY DESC (?time)
"""
def add_args(cmd: argparse.ArgumentParser) -> None:
    """Register the path of the Wikidata JSON export."""
    cmd.add_argument("--json-file", type=Path, required=True)
def load_awards(json_file: Path) -> Iterable[tuple[types.ImdbMovieId, models.Award]]:
    """Yield ``(imdb_id, award)`` pairs parsed from a Wikidata JSON export.

    Award labels are normalized: the "Academy Award for " prefix is stripped,
    and a few special awards are mapped to short names. Entries without an
    IMDb ID are logged and skipped.

    Raises:
        ValueError: if an award label matches neither a special name nor the
            standard prefix.
    """
    with json_file.open() as fd:
        data = json.load(fd)
    name_prefix = "Academy Award for "
    special_names = {
        "Special Achievement Academy Award": "Special Achievement",
        "Academy Honorary Award": "Honorary",
    }
    for item in data:
        name = item["awardLabel"]
        if name in special_names:
            name = special_names[name]
        elif name.startswith(name_prefix):
            name = name.removeprefix(name_prefix)
        else:
            raise ValueError(f"Award name is unexpected: {name!a}")
        award = models.Award(category="oscars")
        # Award appears to materialize its `details` lazily from attributes
        # like `name` — TODO confirm against models.Award.
        award.name = name
        if (datestr := item.get("time")) is not None:
            award.created = datetime.fromisoformat(datestr)
        if "imdbId" not in item:
            log.warning("⚠️ IMDb ID missing for movie: %a", item["filmLabel"])
        else:
            yield item["imdbId"], award
async def remove_all_oscars(conn: db.Connection) -> None:
    """Delete every stored award in the "oscars" category."""
    await conn.execute(
        models.awards.delete().where(models.awards.c.category == "oscars")
    )
async def main(args: argparse.Namespace) -> None:
    """Import Academy-award data from a Wikidata JSON export into the DB.

    All existing "oscars" awards are replaced. Movies referenced in the file
    but unknown to the database are logged and skipped.
    """
    await db.open_connection_pool()
    json_file: Path = args.json_file
    # Group awards by IMDb movie ID so each movie is resolved exactly once
    # (a movie may have won several oscars).
    awards: dict[types.ImdbMovieId, list[models.Award]] = {}
    for imdb_id, award in load_awards(json_file):
        awards.setdefault(imdb_id, []).append(award)
    async with db.new_connection() as conn:
        imdb_ids = list(awards)
        available = await db.get_movie_ids(conn, imdb_ids)
        if missing := set(imdb_ids).difference(available):
            log.warning(
                "⚠️ File (%a) contained %i unknown movies: %a",
                str(json_file),
                len(missing),
                missing,
            )
    async with db.transaction() as conn:
        await remove_all_oscars(conn)
        imported = 0
        for imdb_id, unwind_id in available.items():
            for award in awards[imdb_id]:
                award.movie_id = unwind_id
                await db.add(conn, award)
                imported += 1
    # Lazy %-formatting for consistency with the other log calls; count the
    # awards actually written (may exceed the number of movies).
    log.info("✨ Imported %i oscars.", imported)
    await db.close_connection_pool()

View file

@ -0,0 +1,86 @@
import argparse
import logging
from typing import Callable
from unwind import db, imdb, models
log = logging.getLogger(__name__)
name = "load-imdb-charts"
help = "Load and import charts from imdb.com."
def add_args(cmd: argparse.ArgumentParser) -> None:
    """Register the chart-selection options for this command."""
    cmd.add_argument(
        "--select",
        action="append",
        dest="charts",
        default=[],
        # An ordered sequence instead of a set: set iteration order is
        # hash-randomized, which made help/error output nondeterministic.
        choices=("bottom100", "pop100", "top250"),
        help="Select which charts to refresh.",
    )
async def remove_all_awards(
    conn: db.Connection, category: models.AwardCategory
) -> None:
    """Delete every stored award belonging to *category*."""
    await conn.execute(
        models.awards.delete().where(models.awards.c.category == category)
    )
# Maps each award category to the imdb loader returning that chart's movie
# IDs in ranked order.
_award_handlers: dict[models.AwardCategory, Callable] = {
    "imdb-pop-100": imdb.load_most_popular_100,
    "imdb-top-250": imdb.load_top_250,
    "imdb-bottom-100": imdb.load_bottom_100,
}


async def update_awards(conn: db.Connection, category: models.AwardCategory) -> None:
    """Replace all awards of *category* with the current chart from IMDb.

    Chart entries for movies unknown to the database are logged once and
    skipped. The 1-based chart position is stored on each award.
    """
    load_imdb_ids = _award_handlers[category]
    imdb_ids = await load_imdb_ids()
    available = await db.get_movie_ids(conn, imdb_ids)
    if missing := set(imdb_ids).difference(available):
        log.warning(
            "⚠️ Charts for category (%a) contained %i unknown movies: %a",
            category,
            len(missing),
            missing,
        )
    # Drop the previous chart first so stale positions don't linger.
    await remove_all_awards(conn, category=category)
    for pos, imdb_id in enumerate(imdb_ids, 1):
        if (movie_id := available.get(imdb_id)) is None:
            continue  # unknown movie — already warned above
        award = models.Award(
            movie_id=movie_id,
            category=category,
        )
        # Award appears to store `position` lazily as an attribute rather
        # than a constructor argument — TODO confirm against models.Award.
        award.position = pos
        await db.add(conn, award)
async def main(args: argparse.Namespace) -> None:
    """Refresh the selected IMDb charts (all charts when none are selected)."""
    await db.open_connection_pool()
    if not args.charts:
        args.charts = {"top250", "bottom100", "pop100"}
    # (CLI name, award category, success message) — processing order matches
    # the original implementation: pop100, bottom100, top250.
    chart_specs = (
        ("pop100", "imdb-pop-100", "✨ Updated most popular 100 movies."),
        ("bottom100", "imdb-bottom-100", "✨ Updated bottom 100 movies."),
        ("top250", "imdb-top-250", "✨ Updated top 250 rated movies."),
    )
    for cli_name, category, message in chart_specs:
        if cli_name in args.charts:
            # Each chart refresh gets its own transaction so one failure
            # cannot roll back an already-completed chart.
            async with db.transaction() as conn:
                await update_awards(conn, category)
            log.info(message)
    await db.close_connection_pool()

View file

@ -0,0 +1,28 @@
import argparse
import logging
from unwind import db
from unwind.imdb import refresh_user_ratings_from_imdb
log = logging.getLogger(__name__)
name = "load-user-ratings-from-imdb"
help = """Load user ratings from imdb.com.
Refresh user ratings for all registered users live from IMDb's website.
"""
def add_args(cmd: argparse.ArgumentParser) -> None:
    """This command takes no extra arguments."""
async def main(args: argparse.Namespace) -> None:
    """Refresh user ratings for all registered users live from imdb.com.

    *args* is unused — the command takes no options.
    """
    await db.open_connection_pool()
    # Count the newly imported ratings as they stream in.
    i = 0
    async for _ in refresh_user_ratings_from_imdb():
        i += 1
    log.info("✨ Imported %s new ratings.", i)
    await db.close_connection_pool()

View file

@ -1,21 +1,25 @@
import contextlib import contextlib
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any, AsyncGenerator, Iterable, Literal, Sequence, Type, TypeVar from typing import Any, AsyncGenerator, Iterable, Literal, Sequence, Type
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy.dialects.sqlite import insert
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
import alembic.command
import alembic.config
import alembic.migration
from . import config from . import config
from .models import ( from .models import (
Award,
Model, Model,
Movie, Movie,
Progress, Progress,
Rating, Rating,
User, User,
asplain, asplain,
db_patches, awards,
fromplain, fromplain,
metadata, metadata,
movies, movies,
@ -24,15 +28,33 @@ from .models import (
ratings, ratings,
utcnow, utcnow,
) )
from .types import ULID from .types import ULID, ImdbMovieId, MovieId, UserIdStr
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
T = TypeVar("T")
_engine: AsyncEngine | None = None _engine: AsyncEngine | None = None
type Connection = AsyncConnection type Connection = AsyncConnection
_project_dir = Path(__file__).parent.parent
_alembic_ini = _project_dir / "alembic.ini"
def _init(conn: sa.Connection) -> None:
# See https://alembic.sqlalchemy.org/en/latest/cookbook.html#building-an-up-to-date-database-from-scratch
context = alembic.migration.MigrationContext.configure(conn)
heads = context.get_current_heads()
is_empty_db = not heads # We consider a DB empty if Alembic hasn't touched it yet.
if is_empty_db:
log.info("⚡️ Initializing empty database.")
metadata.create_all(conn)
# We pass our existing connection to Alembic's env.py, to avoid running another asyncio loop there.
alembic_cfg = alembic.config.Config(_alembic_ini)
alembic_cfg.attributes["connection"] = conn
alembic.command.stamp(alembic_cfg, "head")
async def open_connection_pool() -> None: async def open_connection_pool() -> None:
"""Open the DB connection pool. """Open the DB connection pool.
@ -41,11 +63,7 @@ async def open_connection_pool() -> None:
""" """
async with transaction() as conn: async with transaction() as conn:
await conn.execute(sa.text("PRAGMA journal_mode=WAL")) await conn.execute(sa.text("PRAGMA journal_mode=WAL"))
await conn.run_sync(_init)
await conn.run_sync(metadata.create_all, tables=[db_patches])
async with new_connection() as conn:
await apply_db_patches(conn)
async def close_connection_pool() -> None: async def close_connection_pool() -> None:
@ -65,65 +83,7 @@ async def close_connection_pool() -> None:
await engine.dispose() await engine.dispose()
async def current_patch_level(conn: Connection, /) -> str: async def vacuum(conn: Connection, /) -> None:
query = sa.select(db_patches.c.current)
current = await conn.scalar(query)
return current or ""
async def set_current_patch_level(conn: Connection, /, current: str) -> None:
stmt = insert(db_patches).values(id=1, current=current)
stmt = stmt.on_conflict_do_update(set_={"current": stmt.excluded.current})
await conn.execute(stmt)
db_patches_dir = Path(__file__).parent / "sql"
async def apply_db_patches(conn: Connection, /) -> None:
"""Apply all remaining patches to the database.
Beware that patches will be applied in lexicographical order,
i.e. "10" comes before "9".
The current patch state is recorded in the DB itself.
Please note that every SQL statement in a patch file MUST be terminated
using two consecutive semi-colons (;).
Failing to do so will result in an error.
"""
applied_lvl = await current_patch_level(conn)
did_patch = False
for patchfile in sorted(db_patches_dir.glob("*.sql"), key=lambda p: p.stem):
patch_lvl = patchfile.stem
if patch_lvl <= applied_lvl:
continue
log.info("Applying patch: %s", patch_lvl)
sql = patchfile.read_text()
queries = sql.split(";;")
if len(queries) < 2:
log.error(
"Patch file is missing statement terminator (`;;'): %s", patchfile
)
raise RuntimeError("No statement found.")
async with transacted(conn):
for query in queries:
await conn.execute(sa.text(query))
await set_current_patch_level(conn, patch_lvl)
did_patch = True
if did_patch:
await _vacuum(conn)
async def _vacuum(conn: Connection, /) -> None:
"""Vacuum the database. """Vacuum the database.
This function cannot be run on a connection with an open transaction. This function cannot be run on a connection with an open transaction.
@ -194,11 +154,13 @@ async def set_import_progress(conn: Connection, /, progress: float) -> Progress:
return current return current
def _new_engine() -> AsyncEngine: def _connection_uri() -> str:
uri = f"sqlite+aiosqlite:///{config.storage_path}" return f"sqlite+aiosqlite:///{config.storage_path}"
def _new_engine() -> AsyncEngine:
return create_async_engine( return create_async_engine(
uri, _connection_uri(),
isolation_level="SERIALIZABLE", isolation_level="SERIALIZABLE",
) )
@ -257,6 +219,13 @@ async def new_connection() -> AsyncGenerator[Connection, None]:
async def transacted( async def transacted(
conn: Connection, /, *, force_rollback: bool = False conn: Connection, /, *, force_rollback: bool = False
) -> AsyncGenerator[None, None]: ) -> AsyncGenerator[None, None]:
"""Start a transaction for the given connection.
If `force_rollback` is `True` any changes will be rolled back at the end of the
transaction, unless they are explicitly committed.
Nesting transactions is allowed, but mixing values for `force_rollback` will likely
yield unexpected results.
"""
transaction = contextlib.nullcontext() if conn.in_transaction() else conn.begin() transaction = contextlib.nullcontext() if conn.in_transaction() else conn.begin()
async with transaction: async with transaction:
@ -268,11 +237,12 @@ async def transacted(
await conn.rollback() await conn.rollback()
async def add(conn: Connection, /, item: Model) -> None: async def add(conn: Connection, /, *items: Model) -> None:
for item in items:
# Support late initializing - used for optimization. # Support late initializing - used for optimization.
if getattr(item, "_is_lazy", False): if getattr(item, "_is_lazy", False):
assert hasattr(item, "_lazy_init") assert hasattr(item, "_lazy_init")
item._lazy_init() # pyright: ignore [reportGeneralTypeIssues] item._lazy_init() # pyright: ignore[reportAttributeAccessIssue]
table: sa.Table = item.__table__ table: sa.Table = item.__table__
values = asplain(item, serialize=True) values = asplain(item, serialize=True)
@ -294,17 +264,14 @@ async def fetch_one(
return result.first() return result.first()
ModelType = TypeVar("ModelType", bound=Model) async def get[T: Model](
async def get(
conn: Connection, conn: Connection,
/, /,
model: Type[ModelType], model: Type[T],
*, *,
order_by: tuple[sa.Column, Literal["asc", "desc"]] | None = None, order_by: tuple[sa.Column, Literal["asc", "desc"]] | None = None,
**field_values, **field_values,
) -> ModelType | None: ) -> T | None:
"""Load a model instance from the database. """Load a model instance from the database.
Passing `field_values` allows to filter the item to load. You have to encode the Passing `field_values` allows to filter the item to load. You have to encode the
@ -327,9 +294,9 @@ async def get(
return fromplain(model, row._mapping, serialized=True) if row else None return fromplain(model, row._mapping, serialized=True) if row else None
async def get_many( async def get_many[T: Model](
conn: Connection, /, model: Type[ModelType], **field_sets: set | list conn: Connection, /, model: Type[T], **field_sets: set | list
) -> Iterable[ModelType]: ) -> Iterable[T]:
"""Return the items with any values matching all given field sets. """Return the items with any values matching all given field sets.
This is similar to `get_all`, but instead of a scalar value a list of values This is similar to `get_all`, but instead of a scalar value a list of values
@ -346,9 +313,9 @@ async def get_many(
return (fromplain(model, row._mapping, serialized=True) for row in rows) return (fromplain(model, row._mapping, serialized=True) for row in rows)
async def get_all( async def get_all[T: Model](
conn: Connection, /, model: Type[ModelType], **field_values conn: Connection, /, model: Type[T], **field_values
) -> Iterable[ModelType]: ) -> Iterable[T]:
"""Filter all items by comparing all given field values. """Filter all items by comparing all given field values.
If no filters are given, all items will be returned. If no filters are given, all items will be returned.
@ -365,7 +332,7 @@ async def update(conn: Connection, /, item: Model) -> None:
# Support late initializing - used for optimization. # Support late initializing - used for optimization.
if getattr(item, "_is_lazy", False): if getattr(item, "_is_lazy", False):
assert hasattr(item, "_lazy_init") assert hasattr(item, "_lazy_init")
item._lazy_init() # pyright: ignore [reportGeneralTypeIssues] item._lazy_init() # pyright: ignore[reportAttributeAccessIssue]
table: sa.Table = item.__table__ table: sa.Table = item.__table__
values = asplain(item, serialize=True) values = asplain(item, serialize=True)
@ -466,6 +433,33 @@ async def add_or_update_rating(conn: Connection, /, rating: Rating) -> bool:
return False return False
async def get_awards(
conn: Connection, /, imdb_ids: list[ImdbMovieId]
) -> dict[ImdbMovieId, list[Award]]:
query = (
sa.select(Award, movies.c.imdb_id)
.join(movies, awards.c.movie_id == movies.c.id)
.where(movies.c.imdb_id.in_(imdb_ids))
)
rows = await fetch_all(conn, query)
awards_dict: dict[ImdbMovieId, list[Award]] = {}
for row in rows:
awards_dict.setdefault(row.imdb_id, []).append(
fromplain(Award, row._mapping, serialized=True)
)
return awards_dict
async def get_movie_ids(
conn: Connection, imdb_ids: list[ImdbMovieId]
) -> dict[ImdbMovieId, MovieId]:
query = sa.select(movies.c.imdb_id, movies.c.id).where(
movies.c.imdb_id.in_(imdb_ids)
)
rows = await fetch_all(conn, query)
return {row.imdb_id: MovieId(ULID(row.id)) for row in rows}
def sql_escape(s: str, char: str = "#") -> str: def sql_escape(s: str, char: str = "#") -> str:
return s.replace(char, 2 * char).replace("%", f"{char}%").replace("_", f"{char}_") return s.replace(char, 2 * char).replace("%", f"{char}%").replace("_", f"{char}_")
@ -481,7 +475,7 @@ async def find_ratings(
include_unrated: bool = False, include_unrated: bool = False,
yearcomp: tuple[Literal["<", "=", ">"], int] | None = None, yearcomp: tuple[Literal["<", "=", ">"], int] | None = None,
limit_rows: int = 10, limit_rows: int = 10,
user_ids: Iterable[str] = [], user_ids: Iterable[UserIdStr] = [],
) -> Iterable[dict[str, Any]]: ) -> Iterable[dict[str, Any]]:
conditions = [] conditions = []

View file

@ -3,8 +3,8 @@ import logging
import re import re
from collections import namedtuple from collections import namedtuple
from dataclasses import dataclass, field from dataclasses import dataclass, field
from datetime import datetime from datetime import datetime, timezone
from typing import AsyncIterable, NewType from typing import Any, AsyncIterable, Iterable
from urllib.parse import urljoin from urllib.parse import urljoin
import bs4 import bs4
@ -12,13 +12,11 @@ import bs4
from . import db from . import db
from .models import Movie, Rating, User from .models import Movie, Rating, User
from .request import adownload, asession, asoup_from_url, cache_path from .request import adownload, asession, asoup_from_url, cache_path
from .types import ImdbMovieId, ImdbRating, ImdbUserId, Score100
from .utils import json_dump
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
ImdbRating = NewType("ImdbRating", float) # Value range: [1.0, 10.0]
UnwindScore = NewType("UnwindScore", int) # Value range: [0, 100]
MovieId = NewType("MovieId", str) # Pattern: ttXXXXXXXX
UserId = NewType("UserId", str) # Pattern: urXXXXXXXX
# div#ratings-container # div#ratings-container
# div.lister-item.mode-detail # div.lister-item.mode-detail
@ -74,7 +72,7 @@ def movie_url(imdb_id: str):
return f"https://www.imdb.com/title/{imdb_id}/" return f"https://www.imdb.com/title/{imdb_id}/"
def imdb_rating_from_score(score: UnwindScore) -> ImdbRating: def imdb_rating_from_score(score: Score100) -> ImdbRating:
"""Return the IMDb rating from an Unwind Movie score.""" """Return the IMDb rating from an Unwind Movie score."""
assert 0 <= score <= 100 assert 0 <= score <= 100
rating = round(score * 9 / 100 + 1, 1) rating = round(score * 9 / 100 + 1, 1)
@ -82,7 +80,7 @@ def imdb_rating_from_score(score: UnwindScore) -> ImdbRating:
return ImdbRating(rating) return ImdbRating(rating)
def score_from_imdb_rating(rating: ImdbRating | int) -> UnwindScore: def score_from_imdb_rating(rating: ImdbRating | int) -> Score100:
"""Return the Unwind Movie score for an IMDb rating.""" """Return the Unwind Movie score for an IMDb rating."""
# Scale IMDb's 10 point rating to our score of [0, 100]. # Scale IMDb's 10 point rating to our score of [0, 100].
# There's a pitfall here! # There's a pitfall here!
@ -91,7 +89,7 @@ def score_from_imdb_rating(rating: ImdbRating | int) -> UnwindScore:
assert 1.0 <= rating <= 10.0 assert 1.0 <= rating <= 10.0
score = round(100 * (rating - 1) / 9) score = round(100 * (rating - 1) / 9)
assert 0 <= score <= 100 assert 0 <= score <= 100
return UnwindScore(score) return Score100(score)
# find_name: e.g. "Your Mom's Ratings" # find_name: e.g. "Your Mom's Ratings"
@ -108,7 +106,9 @@ find_year = re.compile(
r"(\([IVX]+\) )?\((?P<year>\d{4})(( |\d{4})| (?P<type>[^)]+))?\)" # noqa: RUF001 r"(\([IVX]+\) )?\((?P<year>\d{4})(( |\d{4})| (?P<type>[^)]+))?\)" # noqa: RUF001
).fullmatch ).fullmatch
# find_year_2: e.g. "2024", "19712003", "2024" # noqa: RUF003 # find_year_2: e.g. "2024", "19712003", "2024" # noqa: RUF003
find_year_2 = re.compile(r"(?P<year>\d{4})((?P<end_year>\d{4})?)?").fullmatch # noqa: RUF001 find_year_2 = re.compile(
r"(?P<year>\d{4})((?P<end_year>\d{4})?)?" # noqa: RUF001
).fullmatch
find_movie_id = re.compile(r"/title/(?P<id>tt\d+)/").search find_movie_id = re.compile(r"/title/(?P<id>tt\d+)/").search
find_movie_name = re.compile(r"\d+\. (?P<name>.+)").fullmatch find_movie_name = re.compile(r"\d+\. (?P<name>.+)").fullmatch
# find_vote_count: e.g. "(5.9K)", "(1K)", "(8)" # find_vote_count: e.g. "(5.9K)", "(1K)", "(8)"
@ -116,9 +116,8 @@ find_vote_count = re.compile(r"\((?P<count>\d+(\.\d+)?K?)\)").fullmatch
def _first_string(tag: bs4.Tag) -> str | None: def _first_string(tag: bs4.Tag) -> str | None:
for child in tag.children: for s in tag.strings:
if isinstance(child, str): return s
return child
def _tv_episode_title(series_name: str, episode_name: str) -> str: def _tv_episode_title(series_name: str, episode_name: str) -> str:
@ -126,7 +125,7 @@ def _tv_episode_title(series_name: str, episode_name: str) -> str:
def _movie_and_rating_from_item_legacy(item: bs4.Tag) -> tuple[Movie, Rating]: def _movie_and_rating_from_item_legacy(item: bs4.Tag) -> tuple[Movie, Rating]:
genres = (genre := item.find("span", "genre")) and genre.string or "" genres = ((genre := item.find("span", "genre")) and genre.string) or ""
movie = Movie( movie = Movie(
title=item.h3.a.string.strip(), title=item.h3.a.string.strip(),
genres={s.strip() for s in genres.split(",")}, genres={s.strip() for s in genres.split(",")},
@ -166,7 +165,9 @@ def _movie_and_rating_from_item_legacy(item: bs4.Tag) -> tuple[Movie, Rating]:
ratings_item = item.find("div", "ipl-rating-widget") ratings_item = item.find("div", "ipl-rating-widget")
if match := find_rating_date(ratings_item.find_next("p", "text-muted").string): if match := find_rating_date(ratings_item.find_next("p", "text-muted").string):
rating.rating_date = datetime.strptime(match["date"], "%d %b %Y") rating.rating_date = datetime.strptime(match["date"], "%d %b %Y").replace(
tzinfo=timezone.utc
)
if match := ratings_item.find("div", "ipl-rating-star--other-user"): if match := ratings_item.find("div", "ipl-rating-star--other-user"):
if rating_item := match.find("span", "ipl-rating-star__rating"): if rating_item := match.find("span", "ipl-rating-star__rating"):
rating.score = score_from_imdb_rating(float(rating_item.string)) rating.score = score_from_imdb_rating(float(rating_item.string))
@ -236,15 +237,16 @@ _ForgedRequest = namedtuple("_ForgedRequest", "url headers")
class _RatingsPage: class _RatingsPage:
ratings: list[Rating] = field(default_factory=list) ratings: list[Rating] = field(default_factory=list)
next_page_url: str | None = None next_page_url: str | None = None
imdb_user_id: UserId | None = None imdb_user_id: ImdbUserId | None = None
imdb_user_name: str | None = None imdb_user_name: str | None = None
async def _load_ratings_page(url: str, user_id: UserId) -> _RatingsPage: async def _load_ratings_page(url: str, user_id: ImdbUserId) -> _RatingsPage:
"""Dispatch to handlers for different ratings page versions.""" """Dispatch to handlers for different ratings page versions."""
soup = await asoup_from_url(url) soup = await asoup_from_url(url)
if soup.find("script", id="__NEXT_DATA__", type="application/json") is not None:
return await _load_ratings_page_202407(user_id, url, soup)
if soup.find("meta", property="imdb:pageConst") is not None: if soup.find("meta", property="imdb:pageConst") is not None:
return await _load_ratings_page_2024(user_id, url, soup) return await _load_ratings_page_2024(user_id, url, soup)
elif soup.find("meta", property="pageId") is not None: elif soup.find("meta", property="pageId") is not None:
@ -253,8 +255,92 @@ async def _load_ratings_page(url: str, user_id: UserId) -> _RatingsPage:
raise RuntimeError("Unknown ratings page version.") raise RuntimeError("Unknown ratings page version.")
def _get_or_None(d: dict[str, Any], keys: list[str]) -> Any | None:
for k in keys:
try:
d = d[k]
except KeyError:
return None
if d is None:
break
return d
def _parse_movies_from_nextdata_202407(nextdata: dict) -> Iterable[Movie]:
    """Yield `Movie` objects parsed from a ratings page's __NEXT_DATA__ blob.

    `nextdata` is the decoded JSON of the page's ``__NEXT_DATA__`` script tag
    (July 2024 markup).  Raises KeyError when the expected structure is
    absent.
    """
    nextratings = nextdata["props"]["pageProps"]["mainColumnData"][
        "advancedTitleSearch"
    ]["edges"]
    for ratingdata in nextratings:
        ratingdata = ratingdata["node"]["title"]
        # endYear=ratingdata["releaseYear"]["endYear"]
        # These fields may be missing or null for some titles, hence the
        # tolerant lookups via _get_or_None.
        imdb_rating = _get_or_None(ratingdata, ["ratingsSummary", "aggregateRating"])
        runtime_s = _get_or_None(ratingdata, ["runtime", "seconds"])
        movie = Movie(
            title=ratingdata["titleText"]["text"],
            original_title=_get_or_None(ratingdata, ["originalTitleText", "text"]),
            release_year=ratingdata["releaseYear"]["year"],
            media_type=ratingdata["titleType"]["text"],
            imdb_id=ratingdata["id"],
            imdb_score=(
                None if imdb_rating is None else score_from_imdb_rating(imdb_rating)
            ),
            imdb_votes=_get_or_None(ratingdata, ["ratingsSummary", "voteCount"]),
            # IMDb reports runtime in seconds; the Movie model stores minutes.
            runtime=None if runtime_s is None else int(runtime_s / 60),
            genres={
                genre["genre"]["text"] for genre in ratingdata["titleGenres"]["genres"]
            },
        )
        if movie.media_type == "TV Episode":
            # Episodes carry their series info; fold the series name into the
            # episode title (both canonical and original).
            seriesdata = ratingdata["series"]["series"]
            series_original_title = seriesdata["originalTitleText"]["text"]
            series_title = seriesdata["titleText"]["text"]
            # series_id = seriesdata["releaseYear"]["id"]
            # series_year = seriesdata["releaseYear"]["year"]
            # series_endyear = seriesdata["releaseYear"]["endYear"]
            movie.title = _tv_episode_title(series_title, movie.title)
            # NOTE(review): movie.original_title may be None here — confirm
            # that _tv_episode_title tolerates a None episode name.
            movie.original_title = _tv_episode_title(
                series_original_title, movie.original_title
            )
        yield movie
async def _load_ratings_page_202407(
    user_id: ImdbUserId, url: str, soup: bs4.BeautifulSoup
) -> _RatingsPage:
    """Handle the ratings page from July 2024.

    Movie details come from the page's ``__NEXT_DATA__`` JSON blob; the
    user's actual rating values are fetched separately per movie via
    `_load_user_movie_ratings`.

    Raises RuntimeError when the page carries no ``__NEXT_DATA__`` blob.
    """
    if (
        nextjson := soup.find("script", id="__NEXT_DATA__", type="application/json")
    ) is None:
        raise RuntimeError("No __NEXT_DATA__ BLOB found.")
    nextdata = json.loads(nextjson.string.strip())
    userdata = nextdata["props"]["pageProps"]["aboveTheFoldData"]
    # NOTE(review): next_page_url is left at its default (None) — confirm how
    # pagination is handled for this page version.
    page = _RatingsPage(
        imdb_user_id=userdata["authorId"],
        imdb_user_name=userdata["authorName"],
    )
    movies = _parse_movies_from_nextdata_202407(nextdata)
    # Index by IMDb ID so each fetched rating can be joined to its movie.
    movies_dict = {m.imdb_id: m for m in movies}
    async for rating in _load_user_movie_ratings(user_id, list(movies_dict.keys())):
        # NOTE(review): assumes the API returns ratings only for the requested
        # IDs; a stray ID would raise KeyError here.
        movie = movies_dict[rating.movie_id]
        rating = Rating(
            movie=movie,
            score=score_from_imdb_rating(rating.imdb_rating),
            rating_date=rating.rating_date,
        )
        page.ratings.append(rating)
    return page
async def _load_ratings_page_2024( async def _load_ratings_page_2024(
user_id: UserId, url: str, soup: bs4.BeautifulSoup user_id: ImdbUserId, url: str, soup: bs4.BeautifulSoup
) -> _RatingsPage: ) -> _RatingsPage:
"""Handle the ratings page from 2024.""" """Handle the ratings page from 2024."""
page = _RatingsPage() page = _RatingsPage()
@ -356,8 +442,13 @@ async def _load_ratings_page_legacy(url: str, soup: bs4.BeautifulSoup) -> _Ratin
async def load_and_store_ratings( async def load_and_store_ratings(
user_id: UserId, user_id: ImdbUserId,
) -> AsyncIterable[tuple[Rating, bool]]: ) -> AsyncIterable[tuple[Rating, bool]]:
"""Load user ratings from imdb.com and store them in our database.
All loaded ratings are yielded together with the information whether each rating
was already present in our database.
"""
async with db.new_connection() as conn: async with db.new_connection() as conn:
user = await db.get(conn, User, imdb_id=user_id) or User( user = await db.get(conn, User, imdb_id=user_id) or User(
imdb_id=user_id, name="", secret="" imdb_id=user_id, name="", secret=""
@ -384,7 +475,8 @@ async def load_and_store_ratings(
yield rating, is_updated yield rating, is_updated
async def load_ratings(user_id: UserId) -> AsyncIterable[Rating]: async def load_ratings(user_id: ImdbUserId) -> AsyncIterable[Rating]:
"""Return all ratings for the given user from imdb.com."""
next_url = user_ratings_url(user_id) next_url = user_ratings_url(user_id)
while next_url: while next_url:
@ -394,46 +486,70 @@ async def load_ratings(user_id: UserId) -> AsyncIterable[Rating]:
yield rating yield rating
async def _ids_from_list_html(url: str) -> AsyncIterable[MovieId]: def _ids_from_list_html(soup: bs4.BeautifulSoup) -> Iterable[ImdbMovieId]:
"""Return all IMDb movie IDs (`tt*`) from the given URL.""" """Return all IMDb movie IDs (`tt*`) from the given soup."""
# document.querySelectorAll('li.ipc-metadata-list-summary-item a.ipc-title-link-wrapper') # document.querySelectorAll('li.ipc-metadata-list-summary-item a.ipc-title-link-wrapper')
# .href: '/title/tt1213644/?ref_=chtbtm_t_1' # .href: '/title/tt1213644/?ref_=chtbtm_t_1'
# .text(): '1. Disaster Movie' # .text(): '1. Disaster Movie'
soup = await asoup_from_url(url)
for item in soup.find_all("li", "ipc-metadata-list-summary-item"): for item in soup.find_all("li", "ipc-metadata-list-summary-item"):
if (link := item.find("a", "ipc-title-link-wrapper")) is not None: if (link := item.find("a", "ipc-title-link-wrapper")) is not None:
if (href := link.get("href")) is not None: if (href := link.get("href")) is not None:
if match_ := find_movie_id(href): if match_ := find_movie_id(href):
yield match_["id"] yield ImdbMovieId(match_["id"])
async def load_most_popular_100() -> list[MovieId]: def _items_from_ldjson(soup: bs4.BeautifulSoup) -> Iterable[dict]:
"""Return all items from the LD+JSON block in the given soup."""
if (item := soup.find("script", type="application/ld+json")) is None:
raise RuntimeError("Could not find LD+JSON data.")
data = json.loads(item.string.strip())
if data["@type"] != "ItemList":
raise RuntimeError(f"Expected ItemList, got {data['@type']!a}.")
for item in data["itemListElement"]:
yield item["item"]
def _ids_from_ldjson(soup: bs4.BeautifulSoup) -> Iterable[ImdbMovieId]:
for item in _items_from_ldjson(soup):
if match_ := find_movie_id(item["url"]):
yield ImdbMovieId(match_["id"])
async def load_most_popular_100() -> list[ImdbMovieId]:
"""Return the IMDb's top 100 most popular movies. """Return the IMDb's top 100 most popular movies.
IMDb Charts: Most Popular Movies IMDb Charts: Most Popular Movies
As determined by IMDb users As determined by IMDb users
""" """
url = "https://www.imdb.com/chart/moviemeter/" url = "https://www.imdb.com/chart/moviemeter/"
ids = [tid async for tid in _ids_from_list_html(url)] soup = await asoup_from_url(url)
try:
ids = list(_ids_from_ldjson(soup))
except RuntimeError:
ids = list(_ids_from_list_html(soup))
if len(ids) != 100: if len(ids) != 100:
raise RuntimeError(f"Expected exactly 100 items, got {len(ids)}") raise RuntimeError(f"Expected exactly 100 items, got {len(ids)}")
return ids return ids
async def load_bottom_100() -> list[MovieId]: async def load_bottom_100() -> list[ImdbMovieId]:
"""Return the IMDb's bottom 100 lowest rated movies. """Return the IMDb's bottom 100 lowest rated movies.
IMDb Charts: Lowest Rated Movies IMDb Charts: Lowest Rated Movies
Bottom 100 as voted by IMDb users Bottom 100 as voted by IMDb users
""" """
url = "https://www.imdb.com/chart/bottom/" url = "https://www.imdb.com/chart/bottom/"
ids = [tid async for tid in _ids_from_list_html(url)] soup = await asoup_from_url(url)
try:
ids = list(_ids_from_ldjson(soup))
except RuntimeError:
ids = list(_ids_from_list_html(soup))
if len(ids) != 100: if len(ids) != 100:
raise RuntimeError(f"Expected exactly 100 items, got {len(ids)}") raise RuntimeError(f"Expected exactly 100 items, got {len(ids)}")
return ids return ids
async def load_top_250() -> list[MovieId]: async def load_top_250() -> list[ImdbMovieId]:
"""Return the IMDb's top 250 highest rated movies. """Return the IMDb's top 250 highest rated movies.
IMDb Charts: IMDb Top 250 Movies IMDb Charts: IMDb Top 250 Movies
@ -443,13 +559,15 @@ async def load_top_250() -> list[MovieId]:
qgl_api_url = "https://caching.graphql.imdb.com/" qgl_api_url = "https://caching.graphql.imdb.com/"
query = { query = {
"operationName": "Top250MoviesPagination", "operationName": "Top250MoviesPagination",
"variables": {"first": 250, "locale": "en-US"}, "variables": json_dump({"first": 250, "locale": "en-US"}),
"extensions": { "extensions": json_dump(
{
"persistedQuery": { "persistedQuery": {
"sha256Hash": "26114ee01d97e04f65d6c8c7212ae8b7888fa57ceed105450d1fce09df749b2d", "sha256Hash": "26114ee01d97e04f65d6c8c7212ae8b7888fa57ceed105450d1fce09df749b2d",
"version": 1, "version": 1,
} }
}, }
),
} }
headers = { headers = {
"accept": "application/graphql+json, application/json", "accept": "application/graphql+json, application/json",
@ -476,13 +594,13 @@ async def load_top_250() -> list[MovieId]:
@dataclass @dataclass
class _UserMovieRating: class _UserMovieRating:
movie_id: MovieId movie_id: ImdbMovieId
rating_date: datetime rating_date: datetime
imdb_rating: ImdbRating imdb_rating: ImdbRating
async def _load_user_movie_ratings( async def _load_user_movie_ratings(
user_id: UserId, movie_ids: list[MovieId] user_id: ImdbUserId, movie_ids: list[ImdbMovieId]
) -> AsyncIterable[_UserMovieRating]: ) -> AsyncIterable[_UserMovieRating]:
qgl_api_url = "https://api.graphql.imdb.com/" qgl_api_url = "https://api.graphql.imdb.com/"
headers = { headers = {
@ -527,3 +645,55 @@ async def _load_user_movie_ratings(
except KeyError as err: except KeyError as err:
log.error("Unexpected data structure.", exc_info=err) log.error("Unexpected data structure.", exc_info=err)
raise raise
async def _load_user_ratings_202407(
    user_id: ImdbUserId, movie_ids: list[ImdbMovieId]
) -> AsyncIterable[_UserMovieRating]:
    """
    This is a new API that showed up in July 2024.
    It's used on a user's ratings page to load their ratings.

    Not implemented yet — the code below the raise is unreachable and kept
    only as reference material for the request shape of the new GraphQL
    endpoint.
    """
    raise NotImplementedError()
    # --- unreachable reference implementation below ---
    qgl_api_url = "https://api.graphql.imdb.com/"
    headers = {
        "accept": "application/graphql+json, application/json",
        "content-type": "application/json",
        "origin": "https://www.imdb.com",
    }
    # The GraphQL API expects `variables` and `extensions` as JSON-encoded
    # strings inside the query parameters.
    query = {
        "operationName": "RatingsPage",
        "variables": json.dumps(
            {
                # "after": ...,
                "filter": {
                    "explicitContentConstraint": {
                        "explicitContentFilter": "INCLUDE_ADULT"
                    },
                    "singleUserRatingConstraint": {
                        "filterType": "INCLUDE",
                        "userId": user_id,
                    },
                },
                "first": 100,
                "locale": "en-US",
                "sort": {"sortBy": "SINGLE_USER_RATING_DATE", "sortOrder": "ASC"},
            }
        ),
        "extensions": json.dumps(
            {
                "persistedQuery": {
                    "sha256Hash": "ae30a55f169252b5f0208d686f41aaff231d7f70bb75c257732c80234d71dbe9",
                    "version": 1,
                }
            }
        ),
    }
    async with asession() as s:
        r = await s.get(qgl_api_url, headers=headers, query=query, timeout=10)
        r.raise_for_status()
        data = r.json()
        # ...

View file

@ -5,7 +5,7 @@ import logging
from dataclasses import dataclass, fields from dataclasses import dataclass, fields
from datetime import datetime, timezone from datetime import datetime, timezone
from pathlib import Path from pathlib import Path
from typing import Generator, Literal, Type, TypeVar, overload from typing import Generator, Literal, Type, overload
from . import config, db, request from . import config, db, request
from .db import add_or_update_many_movies from .db import add_or_update_many_movies
@ -14,8 +14,6 @@ from .models import Movie
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
T = TypeVar("T")
# See # See
# - https://developer.imdb.com/non-commercial-datasets/ # - https://developer.imdb.com/non-commercial-datasets/
# - https://datasets.imdbws.com/ # - https://datasets.imdbws.com/
@ -127,7 +125,7 @@ def read_imdb_tsv(
@overload @overload
def read_imdb_tsv( def read_imdb_tsv[T](
path: Path, row_type: Type[T], *, unpack: Literal[True] = True path: Path, row_type: Type[T], *, unpack: Literal[True] = True
) -> Generator[T, None, None]: ... ) -> Generator[T, None, None]: ...

View file

@ -2,7 +2,6 @@ import json
from dataclasses import dataclass, field from dataclasses import dataclass, field
from dataclasses import fields as _fields from dataclasses import fields as _fields
from datetime import datetime, timezone from datetime import datetime, timezone
from functools import partial
from types import UnionType from types import UnionType
from typing import ( from typing import (
Annotated, Annotated,
@ -11,24 +10,33 @@ from typing import (
Container, Container,
Literal, Literal,
Mapping, Mapping,
NewType,
Protocol, Protocol,
Type, Type,
TypeAliasType,
TypedDict, TypedDict,
TypeVar,
Union, Union,
get_args, get_args,
get_origin, get_origin,
) )
from sqlalchemy import Column, ForeignKey, Integer, String, Table from sqlalchemy import Column, ForeignKey, Index, Integer, String, Table
from sqlalchemy.orm import registry from sqlalchemy.orm import registry
from .types import ULID from .types import (
ULID,
JSON = int | float | str | None | list["JSON"] | dict[str, "JSON"] AwardId,
JSONObject = dict[str, JSON] GroupId,
ImdbMovieId,
T = TypeVar("T") JSONObject,
JSONScalar,
MovieId,
RatingId,
Score100,
UserId,
UserIdStr,
)
from .utils import json_dump
class Model(Protocol): class Model(Protocol):
@ -38,8 +46,22 @@ class Model(Protocol):
mapper_registry = registry() mapper_registry = registry()
metadata = mapper_registry.metadata metadata = mapper_registry.metadata
# An explicit naming convention helps Alembic do its job,
# see https://alembic.sqlalchemy.org/en/latest/naming.html.
metadata.naming_convention = {
"ix": "ix_%(column_0_label)s",
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s",
}
def annotations(tp: Type) -> tuple | None: def annotations(tp: Type) -> tuple | None:
# Support type aliases and generic aliases.
if isinstance(tp, TypeAliasType) or hasattr(tp, "__value__"):
tp = tp.__value__
return tp.__metadata__ if hasattr(tp, "__metadata__") else None # type: ignore return tp.__metadata__ if hasattr(tp, "__metadata__") else None # type: ignore
@ -97,13 +119,24 @@ def optional_fields(o):
yield f yield f
json_dump = partial(json.dumps, separators=(",", ":")) def _id[T](x: T) -> T:
"""Return the given argument, aka. the identity function."""
def _id(x: T) -> T:
return x return x
def _unpack(type_: Any) -> Any:
"""Return the wrapped type."""
# Handle type aliases.
if isinstance(type_, TypeAliasType):
return _unpack(type_.__value__)
# Handle newtypes.
if isinstance(type_, NewType):
return _unpack(type_.__supertype__)
return type_
def asplain( def asplain(
o: object, *, filter_fields: Container[str] | None = None, serialize: bool = False o: object, *, filter_fields: Container[str] | None = None, serialize: bool = False
) -> dict[str, Any]: ) -> dict[str, Any]:
@ -125,13 +158,16 @@ def asplain(
if filter_fields is not None and f.name not in filter_fields: if filter_fields is not None and f.name not in filter_fields:
continue continue
target: Any = f.type target: Any = _unpack(f.type)
# XXX this doesn't properly support any kind of nested types # XXX this doesn't properly support any kind of nested types
if (otype := optional_type(f.type)) is not None: if (otype := optional_type(f.type)) is not None:
target = otype target = otype
if (otype := get_origin(target)) is not None: if (otype := get_origin(target)) is not None:
target = otype target = otype
target = _unpack(target)
v = getattr(o, f.name) v = getattr(o, f.name)
if is_optional(f.type) and v is None: if is_optional(f.type) and v is None:
d[f.name] = None d[f.name] = None
@ -148,28 +184,33 @@ def asplain(
assert isinstance(v, list) assert isinstance(v, list)
d[f.name] = dump(list(v)) d[f.name] = dump(list(v))
elif target in {bool, str, int, float}: elif target in {bool, str, int, float}:
assert isinstance( assert isinstance(v, target), (
v, target f"Type mismatch: {f.name!a} ({target!a} != {type(v)!a})"
), f"Type mismatch: {f.name} ({target} != {type(v)})" )
d[f.name] = v
elif target in {Literal}:
assert isinstance(v, JSONScalar.__value__)
d[f.name] = v d[f.name] = v
else: else:
raise ValueError(f"Unsupported value type: {f.name}: {type(v)}") raise ValueError(f"Unsupported value type: {f.name!a}: {type(v)!a}")
return d return d
def fromplain(cls: Type[T], d: Mapping, *, serialized: bool = False) -> T: def fromplain[T](cls: Type[T], d: Mapping, *, serialized: bool = False) -> T:
"""Return an instance of the given model using the given data. """Return an instance of the given model using the given data.
If `serialized` is `True`, collection types (lists, dicts, etc.) will be If `serialized` is `True`, collection types (lists, dicts, etc.) will be
deserialized from string. This is the opposite operation of `serialize` for deserialized from string. This is the opposite operation of `serialize` for
`asplain`. `asplain`.
Fields in the data that cannot be mapped to the given type are simply ignored.
""" """
load = json.loads if serialized else _id load = json.loads if serialized else _id
dd: JSONObject = {} dd: JSONObject = {}
for f in fields(cls): for f in fields(cls):
target: Any = f.type target: Any = _unpack(f.type)
otype = optional_type(f.type) otype = optional_type(f.type)
is_opt = otype is not None is_opt = otype is not None
if is_opt: if is_opt:
@ -177,9 +218,17 @@ def fromplain(cls: Type[T], d: Mapping, *, serialized: bool = False) -> T:
if (xtype := get_origin(target)) is not None: if (xtype := get_origin(target)) is not None:
target = xtype target = xtype
target = _unpack(target)
v = d[f.name] v = d[f.name]
if is_opt and v is None: if is_opt and v is None:
dd[f.name] = v dd[f.name] = v
elif target is Literal:
# Support literal types.
vals = get_args(f.type.__value__)
if v not in vals:
raise ValueError(f"Invalid value: {f.name!a}: {v!a}")
dd[f.name] = v
elif isinstance(v, target): elif isinstance(v, target):
dd[f.name] = v dd[f.name] = v
elif target in {set, list}: elif target in {set, list}:
@ -196,27 +245,38 @@ def fromplain(cls: Type[T], d: Mapping, *, serialized: bool = False) -> T:
def validate(o: object) -> None: def validate(o: object) -> None:
for f in fields(o): for f in fields(o):
vtype = type(getattr(o, f.name)) ftype = _unpack(f.type)
if vtype is f.type:
v = getattr(o, f.name)
vtype = type(v)
if vtype is ftype:
continue continue
origin = get_origin(f.type) origin = get_origin(ftype)
if origin is vtype: if origin is vtype:
continue continue
is_union = isinstance(f.type, UnionType) or origin is Union is_union = isinstance(ftype, UnionType) or origin is Union
if is_union: if is_union:
# Support unioned types. # Support unioned types.
utypes = get_args(f.type) utypes = get_args(ftype)
utypes = [_unpack(t) for t in utypes]
if vtype in utypes: if vtype in utypes:
continue continue
# Support generic types (set[str], list[int], etc.) # Support generic types (set[str], list[int], etc.)
gtypes = [g for u in utypes if (g := get_origin(u)) is not None] gtypes = [_unpack(g) for u in utypes if (g := get_origin(u)) is not None]
if any(vtype is gtype for gtype in gtypes): if any(vtype is gtype for gtype in gtypes):
continue continue
raise ValueError(f"Invalid value type: {f.name}: {vtype}") if origin is Literal:
# Support literal types.
vals = get_args(ftype)
if v in vals:
continue
raise ValueError(f"Invalid value: {f.name!a}: {v!a}")
raise ValueError(f"Invalid value type: {f.name!a}: {vtype!a}")
def utcnow() -> datetime: def utcnow() -> datetime:
@ -224,23 +284,6 @@ def utcnow() -> datetime:
return datetime.now(timezone.utc) return datetime.now(timezone.utc)
@mapper_registry.mapped
@dataclass
class DbPatch:
__table__: ClassVar[Table] = Table(
"db_patches",
metadata,
Column("id", Integer, primary_key=True),
Column("current", String),
)
id: int
current: str
db_patches = DbPatch.__table__
@mapper_registry.mapped @mapper_registry.mapped
@dataclass @dataclass
class Progress: class Progress:
@ -312,15 +355,15 @@ class Movie:
Column("updated", String, nullable=False), # datetime Column("updated", String, nullable=False), # datetime
) )
id: ULID = field(default_factory=ULID) id: MovieId = field(default_factory=ULID)
title: str = None # canonical title (usually English) title: str = None # canonical title (usually English)
original_title: str | None = ( original_title: str | None = (
None # original title (usually transscribed to latin script) None # original title (usually transscribed to latin script)
) )
release_year: int = None # canonical release date release_year: int = None # canonical release date
media_type: str = None media_type: str = None
imdb_id: str = None imdb_id: ImdbMovieId = None
imdb_score: int | None = None # range: [0,100] imdb_score: Score100 | None = None # range: [0,100]
imdb_votes: int | None = None imdb_votes: int | None = None
runtime: int | None = None # minutes runtime: int | None = None # minutes
genres: set[str] | None = None genres: set[str] | None = None
@ -365,10 +408,10 @@ dataclass containing the ID of the linked data.
The contents of the Relation are ignored or discarded when using The contents of the Relation are ignored or discarded when using
`asplain`, `fromplain`, and `validate`. `asplain`, `fromplain`, and `validate`.
""" """
Relation = Annotated[T | None, _RelationSentinel] type Relation[T] = Annotated[T | None, _RelationSentinel]
Access = Literal[ type Access = Literal[
"r", # read "r", # read
"i", # index "i", # index
"w", # write "w", # write
@ -393,8 +436,8 @@ class User:
Column("groups", String, nullable=False), # JSON array Column("groups", String, nullable=False), # JSON array
) )
id: ULID = field(default_factory=ULID) id: UserId = field(default_factory=ULID)
imdb_id: str = None imdb_id: ImdbMovieId = None
name: str = None # canonical user name name: str = None # canonical user name
secret: str = None secret: str = None
groups: list[UserGroup] = field(default_factory=list) groups: list[UserGroup] = field(default_factory=list)
@ -413,6 +456,9 @@ class User:
self.groups.append({"id": group_id, "access": access}) self.groups.append({"id": group_id, "access": access})
users = User.__table__
@mapper_registry.mapped @mapper_registry.mapped
@dataclass @dataclass
class Rating: class Rating:
@ -428,15 +474,15 @@ class Rating:
Column("finished", Integer), # bool Column("finished", Integer), # bool
) )
id: ULID = field(default_factory=ULID) id: RatingId = field(default_factory=ULID)
movie_id: ULID = None movie_id: MovieId = None
movie: Relation[Movie] = None movie: Relation[Movie] = None
user_id: ULID = None user_id: UserId = None
user: Relation[User] = None user: Relation[User] = None
score: int = None # range: [0,100] score: Score100 = None # range: [0,100]
rating_date: datetime = None rating_date: datetime = None
favorite: bool | None = None favorite: bool | None = None
finished: bool | None = None finished: bool | None = None
@ -455,64 +501,11 @@ class Rating:
ratings = Rating.__table__ ratings = Rating.__table__
Index("ratings_index", ratings.c.movie_id, ratings.c.user_id, unique=True)
# TODO
# - distinguish between ratings & watches
# - they are completely separate
# - I can rate something at any time, without having watched it, e.g. in a discussion with a friend I change my opinion on a movie
# - I can watch something without having fully formed an opinion yet, i.e. I don't want to rate it yet
# How are Rating.favorite and Rating.finished linked to Watches?
# - is Rating.favorite automatically Watches[-1].favorite, or any(Watches.favorite)?
# - Rating.favorite is nullable, so unless it's explicitly set we can default to Watches
# - is Rating.finished automatically any(Watches.finished)?
# - Rating.finished is nullable, so unless it's explicitly set we can default to Watches
# - can Rating.finished be set without a Watch?
# - yes
# - can Rating.favorite be set without a Watch?
# - yes
@mapper_registry.mapped
@dataclass
class Watch:
"""A "check-in" event, the user started watching a movie."""
__table__: ClassVar[Table] = Table(
"watches",
metadata,
Column("id", String, primary_key=True), # ULID
Column("movie_id", ForeignKey("movies.id"), nullable=False), # ULID
Column("user_id", ForeignKey("users.id"), nullable=False), # ULID
Column("started", String, nullable=False), # datetime
Column("finished", String), # datetime
Column("geoloc", String), # geo coords
Column("score", Integer), #
Column("favorite", Integer), # bool
)
id: ULID = field(default_factory=ULID)
movie_id: ULID = None
movie: Relation[Movie] = None
user_id: ULID = None
user: Relation[User] = None
started: datetime | None = None
finished: datetime | None = None
geoloc: str | None = None
score: int | None = None # range: [0,100]
favorite: bool | None = None
watches = Rating.__table__
class GroupUser(TypedDict): class GroupUser(TypedDict):
id: str id: UserIdStr
name: str name: str
@ -527,6 +520,72 @@ class Group:
Column("users", String, nullable=False), # JSON array Column("users", String, nullable=False), # JSON array
) )
id: ULID = field(default_factory=ULID) id: GroupId = field(default_factory=ULID)
name: str = None name: str = None
users: list[GroupUser] = field(default_factory=list) users: list[GroupUser] = field(default_factory=list)
# Closed set of Award.category values; kept in sync with the enum comment
# on the awards.category column. Extend here when new award sources are added.
type AwardCategory = Literal[
    "imdb-top-250", "imdb-bottom-100", "imdb-pop-100", "oscars"
]
@mapper_registry.mapped
@dataclass
class Award:
    """A single distinction attached to a movie (IMDb chart entry or Oscar).

    ``details`` holds a JSON-encoded object whose keys depend on the
    category: ``position`` for the IMDb list categories, ``name`` for
    oscars. Use the ``position``/``name`` properties instead of touching
    the raw string.
    """
    __table__: ClassVar[Table] = Table(
        "awards",
        metadata,
        Column("id", String, primary_key=True), # ULID
        Column("movie_id", ForeignKey("movies.id"), nullable=False), # ULID
        Column(
            "category", String, nullable=False
        ), # Enum: "imdb-top-250", "imdb-bottom-100", "imdb-pop-100", "oscars", ...
        Column(
            "details", String, nullable=False
        ), # e.g. "23" (position in list), "2024, nominee, best director", "1977, winner, best picture", ...
        Column("created", String, nullable=False), # datetime
        Column("updated", String, nullable=False), # datetime
    )
    id: AwardId = field(default_factory=ULID)
    movie_id: MovieId | None = None  # FK to movies.id; must be set before persisting
    movie: Relation[Movie] = None  # ORM relation — presumably populated by the mapper; TODO confirm
    category: AwardCategory | None = None
    details: str | None = None  # raw JSON-encoded object; see _details below
    created: datetime = field(default_factory=utcnow)
    updated: datetime = field(default_factory=utcnow)

    @property
    def _details(self) -> JSONObject:
        """Decode ``details`` into a dict; None/empty decodes to {}."""
        return json.loads(self.details or "{}")

    @_details.setter
    def _details(self, details: JSONObject):
        # json_dump uses compact separators, keeping the stored string small.
        self.details = json_dump(details)

    @property
    def position(self) -> int:
        """Position in an IMDb chart list; raises KeyError if not set."""
        return self._details["position"]

    @position.setter
    def position(self, position: int):
        # Read-modify-write: _details decodes a fresh dict on every access,
        # so mutate a local copy and write it back.
        details = self._details
        details["position"] = position
        self._details = details

    @property
    def name(self) -> str:
        """Award name (e.g. an Oscar category); raises KeyError if not set."""
        return self._details["name"]

    @name.setter
    def name(self, name: str):
        details = self._details
        details["name"] = name
        self._details = details
# Module-level alias to the mapped awards table.
awards = Award.__table__

View file

@ -11,7 +11,7 @@ from hashlib import md5
from pathlib import Path from pathlib import Path
from random import random from random import random
from time import sleep, time from time import sleep, time
from typing import Any, Callable, ParamSpec, TypeVar, cast, overload from typing import Any, Callable, cast, overload
import bs4 import bs4
import httpx import httpx
@ -24,13 +24,10 @@ if config.debug and config.cachedir:
config.cachedir.mkdir(exist_ok=True) config.cachedir.mkdir(exist_ok=True)
_shared_asession = None
_ASession_T = httpx.AsyncClient _ASession_T = httpx.AsyncClient
_Response_T = httpx.Response type _Response_T = httpx.Response
_T = TypeVar("_T") _shared_asession: _ASession_T | None = None
_P = ParamSpec("_P")
@asynccontextmanager @asynccontextmanager
@ -59,17 +56,17 @@ async def asession():
_shared_asession = None _shared_asession = None
def _throttle( def _throttle[T, **P](
times: int, per_seconds: float, jitter: Callable[[], float] | None = None times: int, per_seconds: float, jitter: Callable[[], float] | None = None
) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]: ) -> Callable[[Callable[P, T]], Callable[P, T]]:
calls: deque[float] = deque(maxlen=times) calls: deque[float] = deque(maxlen=times)
if jitter is None: if jitter is None:
jitter = lambda: 0.0 # noqa: E731 jitter = lambda: 0.0 # noqa: E731
def decorator(func: Callable[_P, _T]) -> Callable[_P, _T]: def decorator(func: Callable[P, T]) -> Callable[P, T]:
@wraps(func) @wraps(func)
def inner(*args: _P.args, **kwds: _P.kwargs): def inner(*args: P.args, **kwds: P.kwargs):
# clean up # clean up
while calls: while calls:
if calls[0] + per_seconds > time(): if calls[0] + per_seconds > time():

View file

@ -1,36 +0,0 @@
PRAGMA foreign_keys = ON;;
CREATE TABLE IF NOT EXISTS users (
id TEXT NOT NULL PRIMARY KEY,
imdb_id TEXT NOT NULL UNIQUE,
name TEXT NOT NULL
);;
CREATE TABLE IF NOT EXISTS movies (
id TEXT NOT NULL PRIMARY KEY,
title TEXT NOT NULL,
release_year NUMBER NOT NULL,
media_type TEXT NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
score NUMBER NOT NULL,
runtime NUMBER,
genres TEXT NOT NULL,
updated TEXT NOT NULL
);;
CREATE TABLE IF NOT EXISTS ratings (
id TEXT NOT NULL PRIMARY KEY,
movie_id TEXT NOT NULL,
user_id TEXT NOT NULL,
score NUMBER NOT NULL,
rating_date TEXT NOT NULL,
favorite NUMBER,
finished NUMBER,
FOREIGN KEY(movie_id) REFERENCES movies(id),
FOREIGN KEY(user_id) REFERENCES users(id)
);;
CREATE UNIQUE INDEX IF NOT EXISTS ratings_index ON ratings (
movie_id,
user_id
);;

View file

@ -1,40 +0,0 @@
-- add original_title to movies table
-- see https://www.sqlite.org/lang_altertable.html#caution
-- 1. Create new table
-- 2. Copy data
-- 3. Drop old table
-- 4. Rename new into old
CREATE TABLE _migrate_movies (
id TEXT NOT NULL PRIMARY KEY,
title TEXT NOT NULL,
original_title TEXT,
release_year NUMBER NOT NULL,
media_type TEXT NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
score NUMBER,
runtime NUMBER,
genres TEXT NOT NULL,
updated TEXT NOT NULL
);;
INSERT INTO _migrate_movies
SELECT
id,
title,
NULL,
release_year,
media_type,
imdb_id,
score,
runtime,
genres,
updated
FROM movies
WHERE true;;
DROP TABLE movies;;
ALTER TABLE _migrate_movies
RENAME TO movies;;

View file

@ -1,46 +0,0 @@
-- only set original_title if it differs from title,
-- and normalize media_type with an extra table.
CREATE TABLE mediatypes (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL UNIQUE
);;
INSERT INTO mediatypes (name)
SELECT DISTINCT media_type
FROM movies
WHERE true;;
CREATE TABLE _migrate_movies (
id TEXT PRIMARY KEY NOT NULL,
title TEXT NOT NULL,
original_title TEXT,
release_year INTEGER NOT NULL,
media_type_id INTEGER NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
score INTEGER,
runtime INTEGER,
genres TEXT NOT NULL,
updated TEXT NOT NULL,
FOREIGN KEY(media_type_id) REFERENCES mediatypes(id)
);;
INSERT INTO _migrate_movies
SELECT
id,
title,
(CASE WHEN original_title=title THEN NULL ELSE original_title END),
release_year,
(SELECT id FROM mediatypes WHERE name=media_type) AS media_type_id,
imdb_id,
score,
runtime,
genres,
updated
FROM movies
WHERE true;;
DROP TABLE movies;;
ALTER TABLE _migrate_movies
RENAME TO movies;;

View file

@ -1,62 +0,0 @@
-- add convenient view for movies
CREATE VIEW IF NOT EXISTS movies_view
AS SELECT
movies.id,
movies.title,
movies.original_title,
movies.release_year,
mediatypes.name AS media_type,
movies.imdb_id,
movies.score,
movies.runtime,
movies.genres,
movies.updated
FROM movies
JOIN mediatypes ON mediatypes.id=movies.media_type_id;;
CREATE TRIGGER IF NOT EXISTS insert_movies_view
INSTEAD OF INSERT
ON movies_view
BEGIN
INSERT INTO movies (
id,
title,
original_title,
release_year,
media_type_id,
imdb_id,
score,
runtime,
genres,
updated
) VALUES (
NEW.id,
NEW.title,
NEW.original_title,
NEW.release_year,
(SELECT id FROM mediatypes WHERE name=NEW.media_type),
NEW.imdb_id,
NEW.score,
NEW.runtime,
NEW.genres,
NEW.updated
);
END;;
CREATE TRIGGER IF NOT EXISTS update_movies_view
INSTEAD OF UPDATE OF media_type
ON movies_view
BEGIN
UPDATE movies
SET media_type_id=(SELECT id FROM mediatypes WHERE name=NEW.media_type)
WHERE id=OLD.id;
END;;
CREATE TRIGGER IF NOT EXISTS delete_movies_view
INSTEAD OF DELETE
ON movies_view
BEGIN
DELETE FROM movies
WHERE movies.id=OLD.id;
END;;

View file

@ -1,37 +0,0 @@
-- denormalize movie media_type
CREATE TABLE _migrate_movies (
id TEXT PRIMARY KEY NOT NULL,
title TEXT NOT NULL,
original_title TEXT,
release_year INTEGER NOT NULL,
media_type TEXT NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
score INTEGER,
runtime INTEGER,
genres TEXT NOT NULL,
updated TEXT NOT NULL
);;
INSERT INTO _migrate_movies
SELECT
id,
title,
original_title,
release_year,
(SELECT name FROM mediatypes WHERE id=media_type_id) AS media_type,
imdb_id,
score,
runtime,
genres,
updated
FROM movies
WHERE true;;
DROP VIEW movies_view;;
DROP TABLE mediatypes;;
DROP TABLE movies;;
ALTER TABLE _migrate_movies
RENAME TO movies;;

View file

@ -1,2 +0,0 @@
-- see the commit of this file for details.
;;

View file

@ -1,8 +0,0 @@
-- add groups table
CREATE TABLE groups (
id TEXT PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
secret TEXT NOT NULL,
users TEXT NOT NULL -- JSON array
);;

View file

@ -1,7 +0,0 @@
-- add progress table
CREATE TABLE progress (
id TEXT PRIMARY KEY NOT NULL,
state TEXT NOT NULL,
started TEXT NOT NULL
);;

View file

@ -1,36 +0,0 @@
-- add IMDb vote count
CREATE TABLE _migrate_movies (
id TEXT PRIMARY KEY NOT NULL,
title TEXT NOT NULL,
original_title TEXT,
release_year INTEGER NOT NULL,
media_type TEXT NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
imdb_score INTEGER,
imdb_votes INTEGER,
runtime INTEGER,
genres TEXT NOT NULL,
updated TEXT NOT NULL
);;
INSERT INTO _migrate_movies
SELECT
id,
title,
original_title,
release_year,
media_type,
imdb_id,
score AS imdb_score,
NULL AS imdb_votes,
runtime,
genres,
updated
FROM movies
WHERE true;;
DROP TABLE movies;;
ALTER TABLE _migrate_movies
RENAME TO movies;;

View file

@ -1,24 +0,0 @@
-- add "type" and "stopped" columns to the progress table
CREATE TABLE _migrate_progress (
id TEXT PRIMARY KEY NOT NULL,
type TEXT NOT NULL,
state TEXT NOT NULL,
started TEXT NOT NULL,
stopped TEXT
);;
INSERT INTO _migrate_progress
SELECT
id,
'import-imdb-movies' AS type,
state,
started,
NULL AS stopped
FROM progress
WHERE true;;
DROP TABLE progress;;
ALTER TABLE _migrate_progress
RENAME TO progress;;

View file

@ -1,38 +0,0 @@
-- add creation timestamp to movies
CREATE TABLE _migrate_movies (
id TEXT PRIMARY KEY NOT NULL,
title TEXT NOT NULL,
original_title TEXT,
release_year INTEGER NOT NULL,
media_type TEXT NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
imdb_score INTEGER,
imdb_votes INTEGER,
runtime INTEGER,
genres TEXT NOT NULL,
created TEXT NOT NULL,
updated TEXT NOT NULL
);;
INSERT INTO _migrate_movies
SELECT
id,
title,
original_title,
release_year,
media_type,
imdb_id,
imdb_score,
imdb_votes,
runtime,
genres,
updated AS created,
updated
FROM movies
WHERE true;;
DROP TABLE movies;;
ALTER TABLE _migrate_movies
RENAME TO movies;;

View file

@ -1,24 +0,0 @@
-- convert progress.state from a bare number into a JSON object ({"percent": ...})
CREATE TABLE _migrate_progress (
id TEXT PRIMARY KEY NOT NULL,
type TEXT NOT NULL,
state TEXT NOT NULL,
started TEXT NOT NULL,
stopped TEXT
);;
INSERT INTO _migrate_progress
SELECT
id,
type,
'{"percent":' || state || '}' AS state,
started,
stopped
FROM progress
WHERE true;;
DROP TABLE progress;;
ALTER TABLE _migrate_progress
RENAME TO progress;;

View file

@ -1,22 +0,0 @@
-- add secret to users
CREATE TABLE _migrate_users (
id TEXT PRIMARY KEY NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
name TEXT NOT NULL,
secret TEXT NOT NULL
);;
INSERT INTO _migrate_users
SELECT
id,
imdb_id,
name,
'' AS secret
FROM users
WHERE true;;
DROP TABLE users;;
ALTER TABLE _migrate_users
RENAME TO users;;

View file

@ -1,45 +0,0 @@
-- add group admins
--- remove secrets from groups
CREATE TABLE _migrate_groups (
id TEXT PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
users TEXT NOT NULL -- JSON array
);;
INSERT INTO _migrate_groups
SELECT
id,
name,
users
FROM groups
WHERE true;;
DROP TABLE groups;;
ALTER TABLE _migrate_groups
RENAME TO groups;;
--- add group access to users
CREATE TABLE _migrate_users (
id TEXT PRIMARY KEY NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
name TEXT NOT NULL,
secret TEXT NOT NULL,
groups TEXT NOT NULL -- JSON array
);;
INSERT INTO _migrate_users
SELECT
id,
imdb_id,
name,
secret,
'[]' AS groups
FROM users
WHERE true;;
DROP TABLE users;;
ALTER TABLE _migrate_users
RENAME TO users;;

View file

@ -1,38 +0,0 @@
-- remove NOTNULL constraint from movies.genres
CREATE TABLE _migrate_movies (
id TEXT PRIMARY KEY NOT NULL,
title TEXT NOT NULL,
original_title TEXT,
release_year INTEGER NOT NULL,
media_type TEXT NOT NULL,
imdb_id TEXT NOT NULL UNIQUE,
imdb_score INTEGER,
imdb_votes INTEGER,
runtime INTEGER,
genres TEXT,
created TEXT NOT NULL,
updated TEXT NOT NULL
);;
INSERT INTO _migrate_movies
SELECT
id,
title,
original_title,
release_year,
media_type,
imdb_id,
imdb_score,
imdb_votes,
runtime,
genres,
created,
updated
FROM movies
WHERE true;;
DROP TABLE movies;;
ALTER TABLE _migrate_movies
RENAME TO movies;;

View file

@ -1,9 +1,13 @@
import re import re
from typing import cast from typing import NewType, cast
import ulid import ulid
from ulid.hints import Buffer from ulid.hints import Buffer
type JSONScalar = int | float | str | None
type JSON = JSONScalar | list["JSON"] | dict[str, "JSON"]
type JSONObject = dict[str, JSON]
class ULID(ulid.ULID): class ULID(ulid.ULID):
"""Extended ULID type. """Extended ULID type.
@ -29,3 +33,16 @@ class ULID(ulid.ULID):
buffer = cast(memoryview, ulid.new().memory) buffer = cast(memoryview, ulid.new().memory)
super().__init__(buffer) super().__init__(buffer)
AwardId = NewType("AwardId", ULID)
GroupId = NewType("GroupId", ULID)
ImdbMovieId = NewType("ImdbMovieId", str) # Pattern: ttXXXXXXXX
ImdbRating = NewType("ImdbRating", float) # Value range: [1.0, 10.0]
ImdbUserId = NewType("ImdbUserId", str) # Pattern: urXXXXXXXX
MovieId = NewType("MovieId", ULID)
MovieIdStr = NewType("MovieIdStr", str)
RatingId = NewType("RatingId", ULID)
Score100 = NewType("Score100", int) # Value range: [0, 100]
UserId = NewType("UserId", ULID)
UserIdStr = NewType("UserIdStr", str)

View file

@ -1,8 +1,12 @@
import base64 import base64
import hashlib import hashlib
import json
import secrets import secrets
from functools import partial
from typing import Any, TypedDict from typing import Any, TypedDict
json_dump = partial(json.dumps, separators=(",", ":"))
def b64encode(b: bytes) -> str: def b64encode(b: bytes) -> str:
return base64.b64encode(b).decode().rstrip("=") return base64.b64encode(b).decode().rstrip("=")

View file

@ -3,7 +3,7 @@ import contextlib
import logging import logging
import secrets import secrets
from json.decoder import JSONDecodeError from json.decoder import JSONDecodeError
from typing import Literal, overload from typing import Any, Literal, Never, TypeGuard, overload
from starlette.applications import Starlette from starlette.applications import Starlette
from starlette.authentication import ( from starlette.authentication import (
@ -20,15 +20,15 @@ from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.cors import CORSMiddleware from starlette.middleware.cors import CORSMiddleware
from starlette.middleware.gzip import GZipMiddleware from starlette.middleware.gzip import GZipMiddleware
from starlette.requests import HTTPConnection from starlette.requests import HTTPConnection, Request
from starlette.responses import JSONResponse from starlette.responses import JSONResponse
from starlette.routing import Mount, Route from starlette.routing import Mount, Route
from . import config, db, imdb, imdb_import, web_models from . import config, db, imdb, imdb_import, web_models
from .db import close_connection_pool, find_movies, find_ratings, open_connection_pool from .db import close_connection_pool, find_movies, find_ratings, open_connection_pool
from .middleware.responsetime import ResponseTimeMiddleware from .middleware.responsetime import ResponseTimeMiddleware
from .models import Group, Movie, User, asplain from .models import Access, Group, Movie, User, asplain
from .types import ULID from .types import JSON, ULID
from .utils import b64decode, b64encode, phc_compare, phc_scrypt from .utils import b64decode, b64encode, phc_compare, phc_scrypt
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -49,12 +49,12 @@ class BearerAuthBackend(AuthenticationBackend):
self.admin_tokens = {v: k for k, v in credentials.items()} self.admin_tokens = {v: k for k, v in credentials.items()}
async def authenticate(self, conn: HTTPConnection): async def authenticate(self, conn: HTTPConnection):
if "authorization" not in conn.headers: if "Authorization" not in conn.headers:
return return
# XXX should we remove the auth header after reading, for security reasons? # XXX should we remove the auth header after reading, for security reasons?
auth = conn.headers["authorization"] auth = conn.headers["Authorization"]
try: try:
scheme, credentials = auth.split() scheme, credentials = auth.split()
except ValueError as err: except ValueError as err:
@ -62,8 +62,7 @@ class BearerAuthBackend(AuthenticationBackend):
roles = [] roles = []
match scheme.lower(): if scheme.lower() == "bearer":
case "bearer":
is_admin = credentials in self.admin_tokens is_admin = credentials in self.admin_tokens
if not is_admin: if not is_admin:
return return
@ -71,24 +70,24 @@ class BearerAuthBackend(AuthenticationBackend):
user = SimpleUser(name) user = SimpleUser(name)
roles.append("admin") roles.append("admin")
case "basic": elif scheme.lower() == "basic":
try: try:
name, secret = b64decode(credentials).decode().split(":") name, secret = b64decode(credentials).decode().split(":")
except Exception as err: except Exception as err:
raise AuthenticationError("Invalid auth credentials") from err raise AuthenticationError("Invalid auth credentials") from err
user = AuthedUser(name, secret) user = AuthedUser(name, secret)
case _: else:
return return
return AuthCredentials(["authenticated", *roles]), user return AuthCredentials(["authenticated", *roles]), user
def truthy(s: str): def truthy(s: str | None) -> bool:
return bool(s) and s.lower() in {"1", "yes", "true"} return bool(s) and s.lower() in {"1", "yes", "true"}
_Yearcomp = Literal["<", "=", ">"] type _Yearcomp = Literal["<", "=", ">"]
def yearcomp(s: str) -> tuple[_Yearcomp, int] | None: def yearcomp(s: str) -> tuple[_Yearcomp, int] | None:
@ -104,7 +103,7 @@ def yearcomp(s: str) -> tuple[_Yearcomp, int] | None:
def as_int( def as_int(
x, *, max: int | None = None, min: int | None = 1, default: int | None = None x: Any, *, max: int | None = None, min: int | None = 1, default: int | None = None
) -> int: ) -> int:
try: try:
if not isinstance(x, int): if not isinstance(x, int):
@ -122,9 +121,9 @@ def as_int(
return default return default
def as_ulid(s: str) -> ULID: def as_ulid(s: Any) -> ULID:
try: try:
if not s: if not isinstance(s, str) or not s:
raise ValueError("Invalid ULID.") raise ValueError("Invalid ULID.")
return ULID(s) return ULID(s)
@ -134,14 +133,17 @@ def as_ulid(s: str) -> ULID:
@overload @overload
async def json_from_body(request) -> dict: ... async def json_from_body(request: Request) -> dict[str, JSON]: ...
@overload @overload
async def json_from_body(request, keys: list[str]) -> list: ... async def json_from_body(request: Request, keys: list[str]) -> list[JSON]: ...
async def json_from_body(request, keys: list[str] | None = None) -> dict | list: async def json_from_body(
request: Request, keys: list[str] | None = None
) -> dict[str, JSON] | list[JSON]:
data: dict[str, JSON]
if not await request.body(): if not await request.body():
data = {} data = {}
@ -152,7 +154,7 @@ async def json_from_body(request, keys: list[str] | None = None) -> dict | list:
raise HTTPException(422, "Invalid JSON content.") from err raise HTTPException(422, "Invalid JSON content.") from err
if not isinstance(data, dict): if not isinstance(data, dict):
raise HTTPException(422, f"Unexpected JSON root type: {type(data)!a}.") raise HTTPException(422, f"Invalid JSON type: {type(data)!a}")
if not keys: if not keys:
return data return data
@ -160,14 +162,14 @@ async def json_from_body(request, keys: list[str] | None = None) -> dict | list:
try: try:
return [data[k] for k in keys] return [data[k] for k in keys]
except KeyError as err: except KeyError as err:
raise HTTPException(422, f"Missing data for key: {err.args[0]!a}") from err raise HTTPException(422, f"Missing data for key: {err.args[0]}") from err
def is_admin(request): def is_admin(request: Request) -> bool:
return "admin" in request.auth.scopes return "admin" in request.auth.scopes
async def auth_user(request) -> User | None: async def auth_user(request: Request) -> User | None:
if not isinstance(request.user, AuthedUser): if not isinstance(request.user, AuthedUser):
return return
@ -196,7 +198,7 @@ def route(path: str, *, methods: list[str] | None = None, **kwds):
@route("/groups/{group_id}/ratings") @route("/groups/{group_id}/ratings")
async def get_ratings_for_group(request): async def get_ratings_for_group(request: Request) -> JSONResponse:
group_id = as_ulid(request.path_params["group_id"]) group_id = as_ulid(request.path_params["group_id"])
async with db.new_connection() as conn: async with db.new_connection() as conn:
@ -233,11 +235,14 @@ async def get_ratings_for_group(request):
user_ids=user_ids, user_ids=user_ids,
) )
ratings = (web_models.Rating(**r) for r in rows) ratings = [web_models.Rating(**r) for r in rows]
aggr = web_models.aggregate_ratings(ratings, user_ids) async with db.new_connection() as conn:
awards = await db.get_awards(conn, imdb_ids=[r.movie_imdb_id for r in ratings])
resp = tuple(asplain(r) for r in aggr) aggrs = web_models.aggregate_ratings(ratings, user_ids, awards_dict=awards)
resp = tuple(asplain(r) for r in aggrs)
return JSONResponse(resp) return JSONResponse(resp)
@ -254,13 +259,13 @@ def not_found(reason: str = "Not Found"):
return JSONResponse({"error": reason}, status_code=404) return JSONResponse({"error": reason}, status_code=404)
def not_implemented(): def not_implemented() -> Never:
raise HTTPException(404, "Not yet implemented.") raise HTTPException(404, "Not yet implemented.")
@route("/movies") @route("/movies")
@requires(["authenticated"]) @requires(["authenticated"])
async def list_movies(request): async def list_movies(request: Request) -> JSONResponse:
params = request.query_params params = request.query_params
user = await auth_user(request) user = await auth_user(request)
@ -275,7 +280,7 @@ async def list_movies(request):
if not group: if not group:
return not_found("Group not found.") return not_found("Group not found.")
is_allowed = is_admin(request) or user and user.has_access(group_id) is_allowed = is_admin(request) or (user and user.has_access(group_id))
if not is_allowed: if not is_allowed:
return forbidden("No access to group.") return forbidden("No access to group.")
@ -285,7 +290,7 @@ async def list_movies(request):
user_id = as_ulid(user_id) user_id = as_ulid(user_id)
# Currently a user may only directly access their own ratings. # Currently a user may only directly access their own ratings.
is_allowed = is_admin(request) or user and user.id == user_id is_allowed = is_admin(request) or (user and user.id == user_id)
if not is_allowed: if not is_allowed:
return forbidden("No access to user.") return forbidden("No access to user.")
@ -333,13 +338,13 @@ async def list_movies(request):
@route("/movies", methods=["POST"]) @route("/movies", methods=["POST"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def add_movie(request): async def add_movie(request: Request) -> JSONResponse:
not_implemented() not_implemented()
@route("/movies/_reload_imdb", methods=["GET"]) @route("/movies/_reload_imdb", methods=["GET"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def progress_for_load_imdb_movies(request): async def progress_for_load_imdb_movies(request: Request) -> JSONResponse:
async with db.new_connection() as conn: async with db.new_connection() as conn:
progress = await db.get_import_progress(conn) progress = await db.get_import_progress(conn)
if not progress: if not progress:
@ -370,40 +375,12 @@ async def progress_for_load_imdb_movies(request):
return JSONResponse(resp) return JSONResponse(resp)
@route("/users/{user_id}/[movies/{movie_id}/]watches", methods=["POST"])
@requires(["authenticated"])
async def add_watch_to_user(request):
# {
# id
# movie_id
# location (gps)
# started
# finished
# score
# fav
# }
user_id = as_ulid(request.path_params["user_id"])
geoloc, started = await json_from_body(request, ["geoloc", "started"])
@route("/users/{user_id}/[movies/{movie_id}/]watches/{watch_id}", methods=["PUT"])
@requires(["authenticated"])
async def update_watch_for_user(request):
user_id = as_ulid(request.path_params["user_id"])
watch_id = as_ulid(request.path_params["watch_id"])
finished, score, favorite = await json_from_body(
request, ["finished", "score", "favorite"]
)
_import_lock = asyncio.Lock() _import_lock = asyncio.Lock()
@route("/movies/_reload_imdb", methods=["POST"]) @route("/movies/_reload_imdb", methods=["POST"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def load_imdb_movies(request): async def load_imdb_movies(request: Request) -> JSONResponse:
params = request.query_params params = request.query_params
force = truthy(params.get("force")) force = truthy(params.get("force"))
@ -427,7 +404,7 @@ async def load_imdb_movies(request):
@route("/users") @route("/users")
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def list_users(request): async def list_users(request: Request) -> JSONResponse:
async with db.new_connection() as conn: async with db.new_connection() as conn:
users = await db.get_all(conn, User) users = await db.get_all(conn, User)
@ -436,7 +413,7 @@ async def list_users(request):
@route("/users", methods=["POST"]) @route("/users", methods=["POST"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def add_user(request): async def add_user(request: Request) -> JSONResponse:
name, imdb_id = await json_from_body(request, ["name", "imdb_id"]) name, imdb_id = await json_from_body(request, ["name", "imdb_id"])
# XXX restrict name # XXX restrict name
@ -458,7 +435,7 @@ async def add_user(request):
@route("/users/{user_id}") @route("/users/{user_id}")
@requires(["authenticated"]) @requires(["authenticated"])
async def show_user(request): async def show_user(request: Request) -> JSONResponse:
user_id = as_ulid(request.path_params["user_id"]) user_id = as_ulid(request.path_params["user_id"])
if is_admin(request): if is_admin(request):
@ -487,7 +464,7 @@ async def show_user(request):
@route("/users/{user_id}", methods=["DELETE"]) @route("/users/{user_id}", methods=["DELETE"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def remove_user(request): async def remove_user(request: Request) -> JSONResponse:
user_id = as_ulid(request.path_params["user_id"]) user_id = as_ulid(request.path_params["user_id"])
async with db.new_connection() as conn: async with db.new_connection() as conn:
@ -505,7 +482,7 @@ async def remove_user(request):
@route("/users/{user_id}", methods=["PATCH"]) @route("/users/{user_id}", methods=["PATCH"])
@requires(["authenticated"]) @requires(["authenticated"])
async def modify_user(request): async def modify_user(request: Request) -> JSONResponse:
user_id = as_ulid(request.path_params["user_id"]) user_id = as_ulid(request.path_params["user_id"])
if is_admin(request): if is_admin(request):
@ -552,9 +529,13 @@ async def modify_user(request):
return JSONResponse(asplain(user)) return JSONResponse(asplain(user))
def is_valid_access(x: Any) -> TypeGuard[Access]:
return isinstance(x, str) and x in set("riw")
@route("/users/{user_id}/groups", methods=["POST"]) @route("/users/{user_id}/groups", methods=["POST"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def add_group_to_user(request): async def add_group_to_user(request: Request) -> JSONResponse:
user_id = as_ulid(request.path_params["user_id"]) user_id = as_ulid(request.path_params["user_id"])
async with db.new_connection() as conn: async with db.new_connection() as conn:
@ -569,7 +550,7 @@ async def add_group_to_user(request):
if not group: if not group:
return not_found("Group not found") return not_found("Group not found")
if access not in set("riw"): if not is_valid_access(access):
raise HTTPException(422, "Invalid access level.") raise HTTPException(422, "Invalid access level.")
user.set_access(group_id, access) user.set_access(group_id, access)
@ -581,19 +562,19 @@ async def add_group_to_user(request):
@route("/users/{user_id}/ratings") @route("/users/{user_id}/ratings")
@requires(["private"]) @requires(["private"])
async def ratings_for_user(request): async def ratings_for_user(request: Request) -> JSONResponse:
not_implemented() not_implemented()
@route("/users/{user_id}/ratings", methods=["PUT"]) @route("/users/{user_id}/ratings", methods=["PUT"])
@requires("authenticated") @requires("authenticated")
async def set_rating_for_user(request): async def set_rating_for_user(request: Request) -> JSONResponse:
not_implemented() not_implemented()
@route("/users/_reload_ratings", methods=["POST"]) @route("/users/_reload_ratings", methods=["POST"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def load_imdb_user_ratings(request): async def load_imdb_user_ratings(request: Request) -> JSONResponse:
ratings = [rating async for rating in imdb.refresh_user_ratings_from_imdb()] ratings = [rating async for rating in imdb.refresh_user_ratings_from_imdb()]
return JSONResponse({"new_ratings": [asplain(r) for r in ratings]}) return JSONResponse({"new_ratings": [asplain(r) for r in ratings]})
@ -601,7 +582,7 @@ async def load_imdb_user_ratings(request):
@route("/groups") @route("/groups")
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def list_groups(request): async def list_groups(request: Request) -> JSONResponse:
async with db.new_connection() as conn: async with db.new_connection() as conn:
groups = await db.get_all(conn, Group) groups = await db.get_all(conn, Group)
@ -610,7 +591,7 @@ async def list_groups(request):
@route("/groups", methods=["POST"]) @route("/groups", methods=["POST"])
@requires(["authenticated", "admin"]) @requires(["authenticated", "admin"])
async def add_group(request): async def add_group(request: Request) -> JSONResponse:
(name,) = await json_from_body(request, ["name"]) (name,) = await json_from_body(request, ["name"])
# XXX restrict name # XXX restrict name
@ -624,7 +605,7 @@ async def add_group(request):
@route("/groups/{group_id}/users", methods=["POST"]) @route("/groups/{group_id}/users", methods=["POST"])
@requires(["authenticated"]) @requires(["authenticated"])
async def add_user_to_group(request): async def add_user_to_group(request: Request) -> JSONResponse:
group_id = as_ulid(request.path_params["group_id"]) group_id = as_ulid(request.path_params["group_id"])
async with db.new_connection() as conn: async with db.new_connection() as conn:
group = await db.get(conn, Group, id=str(group_id)) group = await db.get(conn, Group, id=str(group_id))
@ -660,11 +641,12 @@ async def add_user_to_group(request):
return JSONResponse(asplain(group)) return JSONResponse(asplain(group))
async def http_exception(request, exc): async def http_exception(request: Request, exc: Exception) -> JSONResponse:
assert isinstance(exc, HTTPException)
return JSONResponse({"error": exc.detail}, status_code=exc.status_code) return JSONResponse({"error": exc.detail}, status_code=exc.status_code)
def auth_error(request, err): def auth_error(conn: HTTPConnection, err: Exception) -> JSONResponse:
return unauthorized(str(err)) return unauthorized(str(err))
@ -690,7 +672,7 @@ def create_app():
Mount(f"{config.api_base}v1", routes=_routes), Mount(f"{config.api_base}v1", routes=_routes),
], ],
middleware=[ middleware=[
Middleware(ResponseTimeMiddleware, header_name="unwind-elapsed"), Middleware(ResponseTimeMiddleware, header_name="Unwind-Elapsed"),
Middleware( Middleware(
AuthenticationMiddleware, AuthenticationMiddleware,
backend=BearerAuthBackend(config.api_credentials), backend=BearerAuthBackend(config.api_credentials),

View file

@ -1,23 +1,22 @@
from dataclasses import dataclass from dataclasses import dataclass
from typing import Container, Iterable from typing import Container, Iterable
from . import imdb, models from . import imdb, models, types
URL = str type URL = str
Score100 = int # [0, 100]
@dataclass @dataclass
class Rating: class Rating:
canonical_title: str canonical_title: str
imdb_score: Score100 | None imdb_score: types.Score100 | None
imdb_votes: int | None imdb_votes: int | None
media_type: str media_type: str
movie_imdb_id: str movie_imdb_id: types.ImdbMovieId
original_title: str | None original_title: str | None
release_year: int release_year: int
user_id: str | None user_id: types.UserIdStr | None
user_score: Score100 | None user_score: types.Score100 | None
@classmethod @classmethod
def from_movie(cls, movie: models.Movie, *, rating: models.Rating | None = None): def from_movie(cls, movie: models.Movie, *, rating: models.Rating | None = None):
@ -37,13 +36,14 @@ class Rating:
@dataclass @dataclass
class RatingAggregate: class RatingAggregate:
canonical_title: str canonical_title: str
imdb_score: Score100 | None imdb_score: types.Score100 | None
imdb_votes: int | None imdb_votes: int | None
link: URL link: URL
media_type: str media_type: str
original_title: str | None original_title: str | None
user_scores: list[Score100] user_scores: list[types.Score100]
year: int year: int
awards: list[str]
@classmethod @classmethod
def from_movie(cls, movie: models.Movie, *, ratings: Iterable[models.Rating] = []): def from_movie(cls, movie: models.Movie, *, ratings: Iterable[models.Rating] = []):
@ -56,15 +56,31 @@ class RatingAggregate:
original_title=movie.original_title, original_title=movie.original_title,
user_scores=[r.score for r in ratings], user_scores=[r.score for r in ratings],
year=movie.release_year, year=movie.release_year,
awards=[],
) )
def _serialize_award(award: models.Award) -> str:
if award.category == "oscars":
return f"{award.category}:{award.name}"
elif award.category.startswith("imdb-"):
return f"{award.category}:{award.position}"
raise RuntimeError(f"Unsupported category: {award.category}")
def aggregate_ratings( def aggregate_ratings(
ratings: Iterable[Rating], user_ids: Container[str] ratings: Iterable[Rating],
user_ids: Container[types.UserIdStr],
*,
awards_dict: dict[types.ImdbMovieId, list[models.Award]] | None = None,
) -> Iterable[RatingAggregate]: ) -> Iterable[RatingAggregate]:
aggr: dict[str, RatingAggregate] = {} if awards_dict is None:
awards_dict = {}
aggr: dict[types.ImdbMovieId, RatingAggregate] = {}
for r in ratings: for r in ratings:
awards = awards_dict.get(r.movie_imdb_id, [])
mov = aggr.setdefault( mov = aggr.setdefault(
r.movie_imdb_id, r.movie_imdb_id,
RatingAggregate( RatingAggregate(
@ -76,6 +92,7 @@ def aggregate_ratings(
original_title=r.original_title, original_title=r.original_title,
user_scores=[], user_scores=[],
year=r.release_year, year=r.release_year,
awards=sorted(_serialize_award(a) for a in awards),
), ),
) )
# XXX do we need this? why don't we just get the ratings we're supposed to aggregate? # XXX do we need this? why don't we just get the ratings we're supposed to aggregate?

518
uv.lock generated Normal file
View file

@ -0,0 +1,518 @@
version = 1
revision = 2
requires-python = ">=3.13"
[[package]]
name = "aiosqlite"
version = "0.21.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" },
]
[[package]]
name = "alembic"
version = "1.15.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mako" },
{ name = "sqlalchemy" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e6/57/e314c31b261d1e8a5a5f1908065b4ff98270a778ce7579bd4254477209a7/alembic-1.15.2.tar.gz", hash = "sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7", size = 1925573, upload-time = "2025-03-28T13:52:00.443Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/41/18/d89a443ed1ab9bcda16264716f809c663866d4ca8de218aa78fd50b38ead/alembic-1.15.2-py3-none-any.whl", hash = "sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53", size = 231911, upload-time = "2025-03-28T13:52:02.218Z" },
]
[[package]]
name = "anyio"
version = "4.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
]
[[package]]
name = "beautifulsoup4"
version = "4.13.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "soupsieve" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" },
]
[[package]]
name = "certifi"
version = "2025.4.26"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" },
]
[[package]]
name = "click"
version = "8.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cd/0f/62ca20172d4f87d93cf89665fbaedcd560ac48b465bd1d92bfc7ea6b0a41/click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d", size = 235857, upload-time = "2025-05-10T22:21:03.111Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/58/1f37bf81e3c689cc74ffa42102fa8915b59085f54a6e4a80bc6265c0f6bf/click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c", size = 102156, upload-time = "2025-05-10T22:21:01.352Z" },
]
[[package]]
name = "colorama"
version = "0.4.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]
[[package]]
name = "coverage"
version = "7.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/4f/2251e65033ed2ce1e68f00f91a0294e0f80c80ae8c3ebbe2f12828c4cd53/coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501", size = 811872, upload-time = "2025-03-30T20:36:45.376Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/21/87e9b97b568e223f3438d93072479c2f36cc9b3f6b9f7094b9d50232acc0/coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd", size = 211708, upload-time = "2025-03-30T20:35:47.417Z" },
{ url = "https://files.pythonhosted.org/packages/75/be/882d08b28a0d19c9c4c2e8a1c6ebe1f79c9c839eb46d4fca3bd3b34562b9/coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00", size = 211981, upload-time = "2025-03-30T20:35:49.002Z" },
{ url = "https://files.pythonhosted.org/packages/7a/1d/ce99612ebd58082fbe3f8c66f6d8d5694976c76a0d474503fa70633ec77f/coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64", size = 245495, upload-time = "2025-03-30T20:35:51.073Z" },
{ url = "https://files.pythonhosted.org/packages/dc/8d/6115abe97df98db6b2bd76aae395fcc941d039a7acd25f741312ced9a78f/coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067", size = 242538, upload-time = "2025-03-30T20:35:52.941Z" },
{ url = "https://files.pythonhosted.org/packages/cb/74/2f8cc196643b15bc096d60e073691dadb3dca48418f08bc78dd6e899383e/coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008", size = 244561, upload-time = "2025-03-30T20:35:54.658Z" },
{ url = "https://files.pythonhosted.org/packages/22/70/c10c77cd77970ac965734fe3419f2c98665f6e982744a9bfb0e749d298f4/coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733", size = 244633, upload-time = "2025-03-30T20:35:56.221Z" },
{ url = "https://files.pythonhosted.org/packages/38/5a/4f7569d946a07c952688debee18c2bb9ab24f88027e3d71fd25dbc2f9dca/coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323", size = 242712, upload-time = "2025-03-30T20:35:57.801Z" },
{ url = "https://files.pythonhosted.org/packages/bb/a1/03a43b33f50475a632a91ea8c127f7e35e53786dbe6781c25f19fd5a65f8/coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3", size = 244000, upload-time = "2025-03-30T20:35:59.378Z" },
{ url = "https://files.pythonhosted.org/packages/6a/89/ab6c43b1788a3128e4d1b7b54214548dcad75a621f9d277b14d16a80d8a1/coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d", size = 214195, upload-time = "2025-03-30T20:36:01.005Z" },
{ url = "https://files.pythonhosted.org/packages/12/12/6bf5f9a8b063d116bac536a7fb594fc35cb04981654cccb4bbfea5dcdfa0/coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487", size = 214998, upload-time = "2025-03-30T20:36:03.006Z" },
{ url = "https://files.pythonhosted.org/packages/2a/e6/1e9df74ef7a1c983a9c7443dac8aac37a46f1939ae3499424622e72a6f78/coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25", size = 212541, upload-time = "2025-03-30T20:36:04.638Z" },
{ url = "https://files.pythonhosted.org/packages/04/51/c32174edb7ee49744e2e81c4b1414ac9df3dacfcb5b5f273b7f285ad43f6/coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42", size = 212767, upload-time = "2025-03-30T20:36:06.503Z" },
{ url = "https://files.pythonhosted.org/packages/e9/8f/f454cbdb5212f13f29d4a7983db69169f1937e869a5142bce983ded52162/coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502", size = 256997, upload-time = "2025-03-30T20:36:08.137Z" },
{ url = "https://files.pythonhosted.org/packages/e6/74/2bf9e78b321216d6ee90a81e5c22f912fc428442c830c4077b4a071db66f/coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1", size = 252708, upload-time = "2025-03-30T20:36:09.781Z" },
{ url = "https://files.pythonhosted.org/packages/92/4d/50d7eb1e9a6062bee6e2f92e78b0998848a972e9afad349b6cdde6fa9e32/coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4", size = 255046, upload-time = "2025-03-30T20:36:11.409Z" },
{ url = "https://files.pythonhosted.org/packages/40/9e/71fb4e7402a07c4198ab44fc564d09d7d0ffca46a9fb7b0a7b929e7641bd/coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73", size = 256139, upload-time = "2025-03-30T20:36:13.86Z" },
{ url = "https://files.pythonhosted.org/packages/49/1a/78d37f7a42b5beff027e807c2843185961fdae7fe23aad5a4837c93f9d25/coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a", size = 254307, upload-time = "2025-03-30T20:36:16.074Z" },
{ url = "https://files.pythonhosted.org/packages/58/e9/8fb8e0ff6bef5e170ee19d59ca694f9001b2ec085dc99b4f65c128bb3f9a/coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883", size = 255116, upload-time = "2025-03-30T20:36:18.033Z" },
{ url = "https://files.pythonhosted.org/packages/56/b0/d968ecdbe6fe0a863de7169bbe9e8a476868959f3af24981f6a10d2b6924/coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada", size = 214909, upload-time = "2025-03-30T20:36:19.644Z" },
{ url = "https://files.pythonhosted.org/packages/87/e9/d6b7ef9fecf42dfb418d93544af47c940aa83056c49e6021a564aafbc91f/coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257", size = 216068, upload-time = "2025-03-30T20:36:21.282Z" },
{ url = "https://files.pythonhosted.org/packages/59/f1/4da7717f0063a222db253e7121bd6a56f6fb1ba439dcc36659088793347c/coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7", size = 203435, upload-time = "2025-03-30T20:36:43.61Z" },
]
[[package]]
name = "greenlet"
version = "3.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/34/c1/a82edae11d46c0d83481aacaa1e578fea21d94a1ef400afd734d47ad95ad/greenlet-3.2.2.tar.gz", hash = "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485", size = 185797, upload-time = "2025-05-09T19:47:35.066Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/30/97b49779fff8601af20972a62cc4af0c497c1504dfbb3e93be218e093f21/greenlet-3.2.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59", size = 269150, upload-time = "2025-05-09T14:50:30.784Z" },
{ url = "https://files.pythonhosted.org/packages/21/30/877245def4220f684bc2e01df1c2e782c164e84b32e07373992f14a2d107/greenlet-3.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf", size = 637381, upload-time = "2025-05-09T15:24:12.893Z" },
{ url = "https://files.pythonhosted.org/packages/8e/16/adf937908e1f913856b5371c1d8bdaef5f58f251d714085abeea73ecc471/greenlet-3.2.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325", size = 651427, upload-time = "2025-05-09T15:24:51.074Z" },
{ url = "https://files.pythonhosted.org/packages/ad/49/6d79f58fa695b618654adac64e56aff2eeb13344dc28259af8f505662bb1/greenlet-3.2.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5", size = 645795, upload-time = "2025-05-09T15:29:26.673Z" },
{ url = "https://files.pythonhosted.org/packages/5a/e6/28ed5cb929c6b2f001e96b1d0698c622976cd8f1e41fe7ebc047fa7c6dd4/greenlet-3.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825", size = 648398, upload-time = "2025-05-09T14:53:36.61Z" },
{ url = "https://files.pythonhosted.org/packages/9d/70/b200194e25ae86bc57077f695b6cc47ee3118becf54130c5514456cf8dac/greenlet-3.2.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d", size = 606795, upload-time = "2025-05-09T14:53:47.039Z" },
{ url = "https://files.pythonhosted.org/packages/f8/c8/ba1def67513a941154ed8f9477ae6e5a03f645be6b507d3930f72ed508d3/greenlet-3.2.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf", size = 1117976, upload-time = "2025-05-09T15:27:06.542Z" },
{ url = "https://files.pythonhosted.org/packages/c3/30/d0e88c1cfcc1b3331d63c2b54a0a3a4a950ef202fb8b92e772ca714a9221/greenlet-3.2.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708", size = 1145509, upload-time = "2025-05-09T14:54:02.223Z" },
{ url = "https://files.pythonhosted.org/packages/90/2e/59d6491834b6e289051b252cf4776d16da51c7c6ca6a87ff97e3a50aa0cd/greenlet-3.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421", size = 296023, upload-time = "2025-05-09T14:53:24.157Z" },
{ url = "https://files.pythonhosted.org/packages/65/66/8a73aace5a5335a1cba56d0da71b7bd93e450f17d372c5b7c5fa547557e9/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418", size = 629911, upload-time = "2025-05-09T15:24:22.376Z" },
{ url = "https://files.pythonhosted.org/packages/48/08/c8b8ebac4e0c95dcc68ec99198842e7db53eda4ab3fb0a4e785690883991/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4", size = 635251, upload-time = "2025-05-09T15:24:52.205Z" },
{ url = "https://files.pythonhosted.org/packages/37/26/7db30868f73e86b9125264d2959acabea132b444b88185ba5c462cb8e571/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763", size = 632620, upload-time = "2025-05-09T15:29:28.051Z" },
{ url = "https://files.pythonhosted.org/packages/10/ec/718a3bd56249e729016b0b69bee4adea0dfccf6ca43d147ef3b21edbca16/greenlet-3.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b", size = 628851, upload-time = "2025-05-09T14:53:38.472Z" },
{ url = "https://files.pythonhosted.org/packages/9b/9d/d1c79286a76bc62ccdc1387291464af16a4204ea717f24e77b0acd623b99/greenlet-3.2.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207", size = 593718, upload-time = "2025-05-09T14:53:48.313Z" },
{ url = "https://files.pythonhosted.org/packages/cd/41/96ba2bf948f67b245784cd294b84e3d17933597dffd3acdb367a210d1949/greenlet-3.2.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8", size = 1105752, upload-time = "2025-05-09T15:27:08.217Z" },
{ url = "https://files.pythonhosted.org/packages/68/3b/3b97f9d33c1f2eb081759da62bd6162159db260f602f048bc2f36b4c453e/greenlet-3.2.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51", size = 1125170, upload-time = "2025-05-09T14:54:04.082Z" },
{ url = "https://files.pythonhosted.org/packages/31/df/b7d17d66c8d0f578d2885a3d8f565e9e4725eacc9d3fdc946d0031c055c4/greenlet-3.2.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240", size = 269899, upload-time = "2025-05-09T14:54:01.581Z" },
]
[[package]]
name = "h11"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
]
[[package]]
name = "honcho"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/65/c8/d860888358bf5c8a6e7d78d1b508b59b0e255afd5655f243b8f65166dafd/honcho-2.0.0.tar.gz", hash = "sha256:af3815c03c634bf67d50f114253ea9fef72ecff26e4fd06b29234789ac5b8b2e", size = 45618, upload-time = "2024-10-06T14:26:53.871Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/48/1c/25631fc359955569e63f5446dbb7022c320edf9846cbe892ee5113433a7e/honcho-2.0.0-py3-none-any.whl", hash = "sha256:56dcd04fc72d362a4befb9303b1a1a812cba5da283526fbc6509be122918ddf3", size = 22093, upload-time = "2024-10-06T14:26:52.181Z" },
]
[[package]]
name = "html5lib"
version = "1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
{ name = "webencodings" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215, upload-time = "2020-06-22T23:32:38.834Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173, upload-time = "2020-06-22T23:32:36.781Z" },
]
[[package]]
name = "httpcore"
version = "1.0.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
]
[[package]]
name = "httpx"
version = "0.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "certifi" },
{ name = "httpcore" },
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
]
[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]
[[package]]
name = "iniconfig"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]
[[package]]
name = "mako"
version = "1.3.10"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" },
]
[[package]]
name = "markupsafe"
version = "3.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
{ url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
{ url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
{ url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
{ url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
{ url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
{ url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
{ url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
{ url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
{ url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
{ url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
{ url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
{ url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
{ url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
{ url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
{ url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
{ url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
{ url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
]
[[package]]
name = "nodeenv"
version = "1.9.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
]
[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]
[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
[[package]]
name = "pyright"
version = "1.1.400"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "nodeenv" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6c/cb/c306618a02d0ee8aed5fb8d0fe0ecfed0dbf075f71468f03a30b5f4e1fe0/pyright-1.1.400.tar.gz", hash = "sha256:b8a3ba40481aa47ba08ffb3228e821d22f7d391f83609211335858bf05686bdb", size = 3846546, upload-time = "2025-04-24T12:55:18.907Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c8/a5/5d285e4932cf149c90e3c425610c5efaea005475d5f96f1bfdb452956c62/pyright-1.1.400-py3-none-any.whl", hash = "sha256:c80d04f98b5a4358ad3a35e241dbf2a408eee33a40779df365644f8054d2517e", size = 5563460, upload-time = "2025-04-24T12:55:17.002Z" },
]
[[package]]
name = "pytest"
version = "8.3.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" },
]
[[package]]
name = "pytest-asyncio"
version = "0.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/c4/453c52c659521066969523e87d85d54139bbd17b78f09532fb8eb8cdb58e/pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f", size = 54156, upload-time = "2025-03-25T06:22:28.883Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694, upload-time = "2025-03-25T06:22:27.807Z" },
]
[[package]]
name = "pytest-cov"
version = "6.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/25/69/5f1e57f6c5a39f81411b550027bf72842c4567ff5fd572bed1edc9e4b5d9/pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", size = 66857, upload-time = "2025-04-05T14:07:51.592Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/28/d0/def53b4a790cfb21483016430ed828f64830dd981ebe1089971cd10cab25/pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde", size = 23841, upload-time = "2025-04-05T14:07:49.641Z" },
]
[[package]]
name = "ruff"
version = "0.11.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/4c/4a3c5a97faaae6b428b336dcca81d03ad04779f8072c267ad2bd860126bf/ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6", size = 4165632, upload-time = "2025-05-15T14:08:56.76Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/9f/596c628f8824a2ce4cd12b0f0b4c0629a62dfffc5d0f742c19a1d71be108/ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58", size = 10316243, upload-time = "2025-05-15T14:08:12.884Z" },
{ url = "https://files.pythonhosted.org/packages/3c/38/c1e0b77ab58b426f8c332c1d1d3432d9fc9a9ea622806e208220cb133c9e/ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed", size = 11083636, upload-time = "2025-05-15T14:08:16.551Z" },
{ url = "https://files.pythonhosted.org/packages/23/41/b75e15961d6047d7fe1b13886e56e8413be8467a4e1be0a07f3b303cd65a/ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca", size = 10441624, upload-time = "2025-05-15T14:08:19.032Z" },
{ url = "https://files.pythonhosted.org/packages/b6/2c/e396b6703f131406db1811ea3d746f29d91b41bbd43ad572fea30da1435d/ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2", size = 10624358, upload-time = "2025-05-15T14:08:21.542Z" },
{ url = "https://files.pythonhosted.org/packages/bd/8c/ee6cca8bdaf0f9a3704796022851a33cd37d1340bceaf4f6e991eb164e2e/ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5", size = 10176850, upload-time = "2025-05-15T14:08:23.682Z" },
{ url = "https://files.pythonhosted.org/packages/e9/ce/4e27e131a434321b3b7c66512c3ee7505b446eb1c8a80777c023f7e876e6/ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641", size = 11759787, upload-time = "2025-05-15T14:08:25.733Z" },
{ url = "https://files.pythonhosted.org/packages/58/de/1e2e77fc72adc7cf5b5123fd04a59ed329651d3eab9825674a9e640b100b/ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947", size = 12430479, upload-time = "2025-05-15T14:08:28.013Z" },
{ url = "https://files.pythonhosted.org/packages/07/ed/af0f2340f33b70d50121628ef175523cc4c37619e98d98748c85764c8d88/ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4", size = 11919760, upload-time = "2025-05-15T14:08:30.956Z" },
{ url = "https://files.pythonhosted.org/packages/24/09/d7b3d3226d535cb89234390f418d10e00a157b6c4a06dfbe723e9322cb7d/ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f", size = 14041747, upload-time = "2025-05-15T14:08:33.297Z" },
{ url = "https://files.pythonhosted.org/packages/62/b3/a63b4e91850e3f47f78795e6630ee9266cb6963de8f0191600289c2bb8f4/ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b", size = 11550657, upload-time = "2025-05-15T14:08:35.639Z" },
{ url = "https://files.pythonhosted.org/packages/46/63/a4f95c241d79402ccdbdb1d823d156c89fbb36ebfc4289dce092e6c0aa8f/ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2", size = 10489671, upload-time = "2025-05-15T14:08:38.437Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9b/c2238bfebf1e473495659c523d50b1685258b6345d5ab0b418ca3f010cd7/ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523", size = 10160135, upload-time = "2025-05-15T14:08:41.247Z" },
{ url = "https://files.pythonhosted.org/packages/ba/ef/ba7251dd15206688dbfba7d413c0312e94df3b31b08f5d695580b755a899/ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125", size = 11170179, upload-time = "2025-05-15T14:08:43.762Z" },
{ url = "https://files.pythonhosted.org/packages/73/9f/5c336717293203ba275dbfa2ea16e49b29a9fd9a0ea8b6febfc17e133577/ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad", size = 11626021, upload-time = "2025-05-15T14:08:46.451Z" },
{ url = "https://files.pythonhosted.org/packages/d9/2b/162fa86d2639076667c9aa59196c020dc6d7023ac8f342416c2f5ec4bda0/ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19", size = 10494958, upload-time = "2025-05-15T14:08:49.601Z" },
{ url = "https://files.pythonhosted.org/packages/24/f3/66643d8f32f50a4b0d09a4832b7d919145ee2b944d43e604fbd7c144d175/ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224", size = 11650285, upload-time = "2025-05-15T14:08:52.392Z" },
{ url = "https://files.pythonhosted.org/packages/95/3a/2e8704d19f376c799748ff9cb041225c1d59f3e7711bc5596c8cfdc24925/ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1", size = 10765278, upload-time = "2025-05-15T14:08:54.56Z" },
]
[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "soupsieve"
version = "2.7"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" },
]
[[package]]
name = "sqlalchemy"
version = "2.0.41"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d3/ad/2e1c6d4f235a97eeef52d0200d8ddda16f6c4dd70ae5ad88c46963440480/sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", size = 2115491, upload-time = "2025-05-14T17:55:31.177Z" },
{ url = "https://files.pythonhosted.org/packages/cf/8d/be490e5db8400dacc89056f78a52d44b04fbf75e8439569d5b879623a53b/sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", size = 2102827, upload-time = "2025-05-14T17:55:34.921Z" },
{ url = "https://files.pythonhosted.org/packages/a0/72/c97ad430f0b0e78efaf2791342e13ffeafcbb3c06242f01a3bb8fe44f65d/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", size = 3225224, upload-time = "2025-05-14T17:50:41.418Z" },
{ url = "https://files.pythonhosted.org/packages/5e/51/5ba9ea3246ea068630acf35a6ba0d181e99f1af1afd17e159eac7e8bc2b8/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", size = 3230045, upload-time = "2025-05-14T17:51:54.722Z" },
{ url = "https://files.pythonhosted.org/packages/78/2f/8c14443b2acea700c62f9b4a8bad9e49fc1b65cfb260edead71fd38e9f19/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", size = 3159357, upload-time = "2025-05-14T17:50:43.483Z" },
{ url = "https://files.pythonhosted.org/packages/fc/b2/43eacbf6ccc5276d76cea18cb7c3d73e294d6fb21f9ff8b4eef9b42bbfd5/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", size = 3197511, upload-time = "2025-05-14T17:51:57.308Z" },
{ url = "https://files.pythonhosted.org/packages/fa/2e/677c17c5d6a004c3c45334ab1dbe7b7deb834430b282b8a0f75ae220c8eb/sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", size = 2082420, upload-time = "2025-05-14T17:55:52.69Z" },
{ url = "https://files.pythonhosted.org/packages/e9/61/e8c1b9b6307c57157d328dd8b8348ddc4c47ffdf1279365a13b2b98b8049/sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", size = 2108329, upload-time = "2025-05-14T17:55:54.495Z" },
{ url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" },
]
[package.optional-dependencies]
aiosqlite = [
{ name = "aiosqlite" },
{ name = "greenlet" },
{ name = "typing-extensions" },
]
[[package]]
name = "starlette"
version = "0.46.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846, upload-time = "2025-04-13T13:56:17.942Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" },
]
[[package]]
name = "typing-extensions"
version = "4.13.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" },
]
[[package]]
name = "ulid-py"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3b/53/d14a8ec344048e21431821cb49e9a6722384f982b889c2dd449428dbdcc1/ulid-py-1.1.0.tar.gz", hash = "sha256:dc6884be91558df077c3011b9fb0c87d1097cb8fc6534b11f310161afd5738f0", size = 22514, upload-time = "2020-09-15T15:35:09.414Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/42/7c/a12c879fe6c2b136a718c142115ff99397fbf62b4929d970d58ae386d55f/ulid_py-1.1.0-py2.py3-none-any.whl", hash = "sha256:b56a0f809ef90d6020b21b89a87a48edc7c03aea80e5ed5174172e82d76e3987", size = 25753, upload-time = "2020-09-15T15:35:08.075Z" },
]
[[package]]
name = "unwind"
version = "0"
source = { editable = "." }
dependencies = [
{ name = "alembic" },
{ name = "beautifulsoup4" },
{ name = "html5lib" },
{ name = "httpx" },
{ name = "sqlalchemy", extra = ["aiosqlite"] },
{ name = "starlette" },
{ name = "ulid-py" },
{ name = "uvicorn" },
]
[package.dev-dependencies]
dev = [
{ name = "honcho" },
{ name = "pyright" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
{ name = "pytest-cov" },
{ name = "ruff" },
]
[package.metadata]
requires-dist = [
{ name = "alembic", specifier = ">=1.13.1,<2" },
{ name = "beautifulsoup4", specifier = ">=4.9.3,<5" },
{ name = "html5lib", specifier = "~=1.1" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "sqlalchemy", extras = ["aiosqlite"], specifier = "~=2.0" },
{ name = "starlette", specifier = ">=0.46.2" },
{ name = "ulid-py", specifier = ">=1.1.0,<2" },
{ name = "uvicorn", specifier = ">=0.34.2" },
]
[package.metadata.requires-dev]
dev = [
{ name = "honcho" },
{ name = "pyright" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
{ name = "pytest-cov" },
{ name = "ruff" },
]
[[package]]
name = "uvicorn"
version = "0.34.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815, upload-time = "2025-04-19T06:02:50.101Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483, upload-time = "2025-04-19T06:02:48.42Z" },
]
[[package]]
name = "webencodings"
version = "0.5.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
]