init with some kind of working prototype
commit b5cb22822e
22 changed files with 1292 additions and 0 deletions
unwind/models.py (new file, 139 lines)
@@ -0,0 +1,139 @@
import json
from dataclasses import asdict, dataclass, field, fields, is_dataclass
from datetime import datetime, timezone
from typing import Any, ClassVar, Optional, Type, Union, get_args, get_origin

from .types import ULID


def is_optional(tp: Type):
    # True if tp is Optional[X], i.e. Union[X, None] with exactly two members.
    if get_origin(tp) is not Union:
        return False

    args = get_args(tp)
    return len(args) == 2 and type(None) in args


def optional_type(tp: Type):
    # Return X for Optional[X], or None if tp is not an Optional type.
    if get_origin(tp) is not Union:
        return None

    args = get_args(tp)
    if len(args) != 2 or args[1] is not type(None):
        return None

    return args[0]


def asplain(o) -> dict[str, Any]:
    # Convert a dataclass instance into a dict of plain, storage-friendly values.
    validate(o)

    d = asdict(o)
    for f in fields(o):
        target = f.type
        # XXX this doesn't properly support any kind of nested types
        if (otype := optional_type(f.type)) is not None:
            target = otype
        if (otype := get_origin(target)) is not None:
            target = otype

        v = d[f.name]
        if target is ULID:
            d[f.name] = str(v)
        elif target in {datetime}:
            d[f.name] = v.isoformat()
        elif target in {set}:
            d[f.name] = json.dumps(sorted(v))
        elif target in {list}:
            d[f.name] = json.dumps(list(v))
        elif target in {bool, str, int, float, None}:
            pass
        else:
            raise ValueError(f"Unsupported value type: {f.name}: {type(v)}")

    return d


def fromplain(cls, d: dict[str, Any]):
    # Rebuild a dataclass instance from a dict produced by asplain().
    # if not is_dataclass(cls):
    #     raise TypeError(f'Not a dataclass: {type(cls)}')

    dd = {}
    for f in fields(cls):
        target = f.type
        otype = optional_type(f.type)
        is_opt = otype is not None
        if is_opt:
            target = otype
        if (xtype := get_origin(target)) is not None:
            target = xtype

        v = d[f.name]
        if is_opt and v is None:
            dd[f.name] = v
        elif isinstance(v, target):
            dd[f.name] = v
        elif target in {set, list}:
            dd[f.name] = target(json.loads(v))
        elif target in {datetime}:
            dd[f.name] = target.fromisoformat(v)
        else:
            dd[f.name] = target(v)

    o = cls(**dd)
    validate(o)
    return o


def validate(o):
    # Raise ValueError if any field's value does not match its annotated type.
    for f in fields(o):
        vtype = type(getattr(o, f.name))
        if vtype is not f.type:
            if get_origin(f.type) is vtype or (
                get_origin(f.type) is Union and vtype in get_args(f.type)
            ):
                continue
            raise ValueError(f"Invalid value type: {f.name}: {vtype}")


def utcnow():
    # Timezone-aware "now" in UTC.
    return datetime.now(timezone.utc)


@dataclass
class Movie:
    _table: ClassVar[str] = "movies"

    id: ULID = field(default_factory=ULID)
    title: str = None  # canonical title
    release_year: int = None  # canonical release year
    media_type: Optional[str] = None
    imdb_id: str = None
    score: int = None  # range: [0,100]
    runtime: Optional[int] = None  # minutes
    genres: set[str] = None
    updated: datetime = field(default_factory=utcnow)


@dataclass
class Rating:
    _table: ClassVar[str] = "ratings"

    id: ULID = field(default_factory=ULID)
    movie_id: ULID = None
    user_id: ULID = None
    score: int = None  # range: [0,100]
    rating_date: datetime = None
    favorite: Optional[bool] = None
    finished: Optional[bool] = None


@dataclass
class User:
    _table: ClassVar[str] = "users"

    id: ULID = field(default_factory=ULID)
    imdb_id: str = None
    name: str = None  # canonical user name
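For reference, a minimal round-trip sketch of how the serialization helpers above are intended to be used. The values are illustrative only, and it assumes the ULID type imported from .types can be reconstructed from its str() form (not shown in this file):

    from unwind.models import Movie, asplain, fromplain

    movie = Movie(
        title="Heat",
        release_year=1995,
        imdb_id="tt0113277",
        score=83,
        genres={"crime", "thriller"},
    )

    row = asplain(movie)              # ULID -> str, datetime -> ISO 8601, set -> JSON text
    restored = fromplain(Movie, row)  # parses the plain values back into typed fields

Note that asplain() raises ValueError for any field whose (unwrapped) type it cannot flatten, and fromplain() expects every field name of the dataclass to be present in the input dict.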