fix using deprecated SQLAlchemy mapping access
parent eb76ab1867
commit 496c51402a
3 changed files with 11 additions and 9 deletions
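SQLAlchemy 1.4 deprecates treating a result Row like a dict (string indexing and dict(row)), and 2.0 removes that behaviour; the supported replacement is the read-only RowMapping view exposed as row._mapping. That is the whole change here: every place in unwind/db.py that indexed or dict()-ed a fetched row now goes through row._mapping, the fromplain() signature is widened to accept any Mapping, and the test script opts in to the 2.0 deprecation warnings. A minimal sketch of the before/after access pattern, using an illustrative in-memory table rather than anything from this project:

# A minimal sketch of the access pattern this commit moves to, assuming
# SQLAlchemy 1.4+; the table and values are illustrative only.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///:memory:")

with engine.connect() as conn:
    conn.execute(text("CREATE TABLE rating (movie_id INTEGER, stars INTEGER)"))
    conn.execute(text("INSERT INTO rating VALUES (1, 5)"))

    row = conn.execute(text("SELECT movie_id, stars FROM rating")).first()

    # Deprecated in 1.4 and removed in 2.0: using the Row itself like a dict.
    # movie_id = row["movie_id"]

    # Supported replacement: go through the read-only RowMapping view.
    movie_id = row._mapping["movie_id"]
    as_dict = dict(row._mapping)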
@@ -10,5 +10,6 @@ trap 'rm "$dbfile"' EXIT TERM INT QUIT
 [ -z "${DEBUG:-}" ] || set -x
+SQLALCHEMY_WARN_20=1 \
 UNWIND_STORAGE="$dbfile" \
 python -m pytest "$@"
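SQLALCHEMY_WARN_20=1 tells SQLAlchemy 1.4 to emit its RemovedIn20Warning deprecation warnings, so any remaining legacy usage (such as dict-style Row access) surfaces during the pytest run. A small sketch of the same idea in Python, assuming SQLAlchemy 1.4; escalating the warnings to errors is an optional extra, not something this commit does:

# A sketch of what SQLALCHEMY_WARN_20 enables, assuming SQLAlchemy 1.4.
# The variable must be set before sqlalchemy is first imported, which is why
# the test script exports it for the whole pytest invocation.
import os

os.environ["SQLALCHEMY_WARN_20"] = "1"

import warnings

from sqlalchemy import exc as sa_exc

# Optional extra (not part of this commit): escalate the 2.0 deprecation
# warnings so remaining legacy usage fails instead of merely warning.
warnings.simplefilter("error", sa_exc.RemovedIn20Warning)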
unwind/db.py (16 lines changed)

@@ -263,7 +263,7 @@ async def get(
     query += f" ORDER BY {order_by}"
     async with locked_connection() as conn:
         row = await conn.fetch_one(query=query, values=values)
-        return fromplain(model, row, serialized=True) if row else None
+        return fromplain(model, row._mapping, serialized=True) if row else None
 
 
 async def get_many(model: Type[ModelType], **kwds) -> Iterable[ModelType]:
@@ -283,7 +283,7 @@ async def get_many(model: Type[ModelType], **kwds) -> Iterable[ModelType]:
     query = f"SELECT {fields_} FROM {model._table} WHERE {cond}"
     async with locked_connection() as conn:
         rows = await conn.fetch_all(query=query, values=values)
-        return (fromplain(model, row, serialized=True) for row in rows)
+        return (fromplain(model, row._mapping, serialized=True) for row in rows)
 
 
 async def get_all(model: Type[ModelType], **kwds) -> Iterable[ModelType]:
@@ -294,7 +294,7 @@ async def get_all(model: Type[ModelType], **kwds) -> Iterable[ModelType]:
     query = f"SELECT {fields_} FROM {model._table} WHERE {cond}"
     async with locked_connection() as conn:
         rows = await conn.fetch_all(query=query, values=values)
-        return (fromplain(model, row, serialized=True) for row in rows)
+        return (fromplain(model, row._mapping, serialized=True) for row in rows)
 
 
 async def update(item):
@@ -467,7 +467,7 @@ async def find_ratings(
     """
     async with locked_connection() as conn:
         rows = await conn.fetch_all(bindparams(query, values))
-        movie_ids = tuple(r["movie_id"] for r in rows)
+        movie_ids = tuple(r._mapping["movie_id"] for r in rows)
 
         if include_unrated and len(movie_ids) < limit_rows:
             sqlin, sqlin_vals = sql_in("id", movie_ids, not_=True)
@@ -486,7 +486,7 @@ async def find_ratings(
                     {**values, **sqlin_vals, "limit_rows": limit_rows - len(movie_ids)},
                 )
             )
-            movie_ids += tuple(r["movie_id"] for r in rows)
+            movie_ids += tuple(r._mapping["movie_id"] for r in rows)
 
     return await ratings_for_movie_ids(ids=movie_ids)
 
@@ -528,7 +528,7 @@ async def ratings_for_movie_ids(
 
     async with locked_connection() as conn:
         rows = await conn.fetch_all(bindparams(query, vals))
-        return tuple(dict(r) for r in rows)
+        return tuple(dict(r._mapping) for r in rows)
 
 
 def sql_fields(tp: Type):
@@ -584,7 +584,7 @@ async def ratings_for_movies(
     async with locked_connection() as conn:
         rows = await conn.fetch_all(query, values)
 
-    return (fromplain(Rating, row, serialized=True) for row in rows)
+    return (fromplain(Rating, row._mapping, serialized=True) for row in rows)
 
 
 async def find_movies(
@@ -651,7 +651,7 @@ async def find_movies(
     async with locked_connection() as conn:
        rows = await conn.fetch_all(bindparams(query, values))
 
-    movies = [fromplain(Movie, row, serialized=True) for row in rows]
+    movies = [fromplain(Movie, row._mapping, serialized=True) for row in rows]
 
     if not user_ids:
         return ((m, []) for m in movies)
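Within unwind/db.py the change is the same one-liner repeated across the read helpers: fetch rows, then hand row._mapping (rather than the Row itself) to the model constructor or to dict(). A self-contained sketch of that shape, with a dataclass standing in for the project's fromplain()/model machinery and synchronous SQLAlchemy Core standing in for the async locked_connection() helpers:

# Illustrative only: the dataclass, table and data below are stand-ins, not
# part of the project.
from dataclasses import dataclass

from sqlalchemy import create_engine, text


@dataclass
class Movie:
    id: int
    title: str


engine = create_engine("sqlite:///:memory:")

with engine.connect() as conn:
    conn.execute(text("CREATE TABLE movie (id INTEGER, title TEXT)"))
    conn.execute(
        text("INSERT INTO movie VALUES (:id, :title)"),
        [{"id": 1, "title": "Heat"}, {"id": 2, "title": "Ronin"}],
    )

    rows = conn.execute(text("SELECT id, title FROM movie")).all()

    # Build model instances from the RowMapping, not from the Row itself;
    # dict(row._mapping) is likewise the replacement for dict(row).
    movies = [Movie(**row._mapping) for row in rows]
    plain = [dict(row._mapping) for row in rows]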
@@ -10,6 +10,7 @@ from typing import (
     ClassVar,
     Container,
     Literal,
+    Mapping,
     Type,
     TypeVar,
     Union,
@@ -144,7 +145,7 @@ def asplain(
     return d
 
 
-def fromplain(cls: Type[T], d: dict[str, Any], *, serialized: bool = False) -> T:
+def fromplain(cls: Type[T], d: Mapping, *, serialized: bool = False) -> T:
     """Return an instance of the given model using the given data.
 
     If `serialized` is `True`, collection types (lists, dicts, etc.) will be
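Widening the fromplain() parameter from dict[str, Any] to Mapping is what lets the call sites above pass row._mapping directly: RowMapping is a read-only Mapping, not a dict, and only read access is needed. An illustrative sketch with a hypothetical build() helper, not the project's fromplain():

# Illustrative only: a hypothetical build() helper in the spirit of fromplain(),
# annotated with Mapping so that both a plain dict and a read-only SQLAlchemy
# RowMapping are accepted without copying.
from dataclasses import dataclass, fields
from typing import Any, Mapping, Type, TypeVar

T = TypeVar("T")


@dataclass
class Rating:
    movie_id: int
    stars: int


def build(cls: Type[T], data: Mapping[str, Any]) -> T:
    # Only read access is needed, so Mapping (rather than dict) is the honest
    # annotation for the incoming data.
    return cls(**{f.name: data[f.name] for f in fields(cls)})


rating = build(Rating, {"movie_id": 1, "stars": 5})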