run reformatting

ducklet 2023-02-08 23:00:57 +01:00
parent 6fac05d9d3
commit b6670191e5
5 changed files with 3 additions and 32 deletions

View file

@@ -69,7 +69,6 @@ def getargs() -> argparse.Namespace:
 
     # Command: scan
     with command_parser("scan", help="Import from a local file system.") as subparser:
-
         subparser.add_argument(
             "basedir",
             type=Path,
@@ -97,7 +96,6 @@ def getargs() -> argparse.Namespace:
         help="Import from `ls -lR`.",
         description="When ingesting data from an external source, the hostname will not be set automatically.",
     ) as subparser:
-
         subparser.add_argument(
             "infile",
             nargs="?",
@@ -121,7 +119,6 @@ def getargs() -> argparse.Namespace:
     with command_parser(
         "ingest-db", help="Import from a metadex.sqlite file."
     ) as subparser:
-
         subparser.add_argument(
             "infile",
             type=Path,
@@ -138,7 +135,6 @@ def getargs() -> argparse.Namespace:
     with command_parser(
         "ingest-rclone-json", help="Import from `rclone lsjson`."
     ) as subparser:
-
         subparser.add_argument(
             "infile",
             nargs="?",
@@ -301,15 +297,12 @@ def cmd_ls(args: argparse.Namespace) -> int:
     args.file = [f for f in args.file if f]
 
     if not args.file:
         # List all known hosts.
-
         for host in sorted(metadex.hosts(), key=str.casefold):
             print(f"{host}:")
-
     else:
         for pathspec in args.file:
             is_match = False
-
             for file in metadex.ls(pathspec, type=args.type, match=args.match):
                 is_match = True
 

View file

@@ -249,7 +249,6 @@ def search(
     hostname_like: "str | None" = None,
     hostname_regex: "str | None" = None,
 ) -> "Iterable[Row]":
-
     stmt = select(metadex)
 
     if type:
@@ -524,8 +523,7 @@ def reassign_parent_ids(conn: Connection):
     stmt = select(
         metadex.c.id, metadex.c.parent_id, metadex.c.location, metadex.c.hostname
     )
-
-    for (m_id, p_id_old, loc, host) in conn.execute(stmt):
+    for m_id, p_id_old, loc, host in conn.execute(stmt):
         parent_loc = str(Path(loc).parent)
         if parent_loc == loc:
             p_id = None

View file

@@ -95,12 +95,10 @@ def parse_file(
 
 def parse_lines(
     lines: Iterable[str], *, ref_year: "int | None" = None
 ) -> Iterable["File | ChangeDir"]:
-
     workdir = Path("/")
     dirname: "Path | None" = None
-
     for i, line in enumerate(lines, start=1):
         if not line:
             # empty line, reset context
             if dirname is not None:

View file

@@ -7,6 +7,7 @@ from dataclasses import dataclass
 from datetime import datetime, timezone
 from pathlib import Path
 from typing import Any, Iterable, Literal, TextIO
+from typing_extensions import TypeAlias
 
 from . import config, db, ignore, ls_parser, models, utils
 
@@ -56,7 +57,6 @@ def _scan_add_only(
     context = _LogContext()
 
     with db.transaction() as conn:
-
         context.seen += 1
         d = models.File.dict_from_entry(path)
 
@@ -80,7 +80,6 @@ def _scan_add_only(
         dirs.append(path)
 
         while dirs:
             cwd = dirs.popleft()
-
             try:
                 scan = os.scandir(cwd)
@@ -91,7 +90,6 @@ def _scan_add_only(
             subdirs: deque[Path] = deque()
 
             with scan as files:
                 for f in files:
-
                     context.seen += 1
                     _log_context(f.path, context)
@@ -139,7 +137,6 @@ def _scan_remove_missing(
     context = _LogContext()
 
     with db.transaction() as conn:
-
         context.seen += 1
         d = models.File.dict_from_entry(path)
 
@@ -163,7 +160,6 @@ def _scan_remove_missing(
         dirs.append(path)
 
         while dirs:
             cwd = dirs.popleft()
-
             try:
                 scan = os.scandir(cwd)
@@ -176,7 +172,6 @@ def _scan_remove_missing(
             subdirs: deque[Path] = deque()
 
             with scan as files:
                 for f in files:
-
                     context.seen += 1
                     _log_context(f.path, context)
@@ -333,9 +328,7 @@ def ingest_db_file(
     with db.transaction() as conn, other_db.transaction(
         force_rollback=True
     ) as other_conn:
-
         for row in db.iter_all(other_conn):
-
             context.seen += 1
             _log_context(row["location"], context)
 
@@ -392,7 +385,6 @@ def ingest_rclone_json(
     context = _LogContext()
 
     with db.transaction() as conn:
-
         for d in _parse_rclone_json(file, remote_base=remote_base):
             context.seen += 1
 
@@ -431,9 +423,7 @@ def _ingest_ls_add_only(
     context = _LogContext()
 
     with db.transaction() as conn:
-
         for f in ls_parser.parse_file(file, ref_year=ref_year):
-
             if isinstance(f, ls_parser.ChangeDir):
                 continue
 
@@ -483,11 +473,8 @@ def _ingest_ls_remove_missing(
     context = _LogContext()
 
     with db.transaction() as conn:
-
         for f in ls_parser.parse_file(file, ref_year=ref_year):
-
             if isinstance(f, ls_parser.ChangeDir):
-
                 if f.to is not None:
                     expected = {name for name in db.files_in_dir(conn, str(f.to))}
 
@@ -550,14 +537,12 @@ def _ls_files(
     with db.transaction() as conn:
         if match == "regex":
-
             for f in db.search(
                 conn, type=type, hostname_regex=host, regex=f"(?i){path}"
             ):
                 yield models.File(**f)  # type: ignore
 
         elif match == "glob":
             filters: dict[str, "str | None"] = {"type": type}
-
             if host and _uses_glob(host):
                 filters["hostname_like"] = liketerm_from_glob(host)
 
@@ -588,7 +573,6 @@ def _ls_files(
                 yield models.File(**f)  # type: ignore
 
         elif match == "fuzzy":
             term = "%".join(db.escape(p) for p in path.split("/"))
-
             for f in db.search(conn, like=f"%{term}%", type=type, hostname=host):
 
@@ -598,9 +582,7 @@ def _ls_files(
 
 def _ls_dir_contents(
     *, host: str, path: str, type: "models.StatType | None" = None
 ) -> Iterable[models.File]:
-
     with db.transaction() as conn:
-
         row = db.get_file(conn, location=path, hostname=host)
         if not row:
@@ -660,7 +642,6 @@ def rm(pathspec: str, *, include_children: bool = False) -> None:
 
         path = path[:-1]
 
     with db.transaction() as conn:
-
         row = db.get_file(conn, hostname=host, location=path)
         if not row:

View file

@@ -5,6 +5,7 @@ from os import DirEntry
 from pathlib import Path
 from stat import S_IFDIR, S_IFLNK, S_IFMT, S_IFREG
 from typing import Literal
+from typing_extensions import Self
 
 from . import config
 