Compare commits

...

16 Commits

Author SHA1 Message Date
Danilo Reyes
e985e359a7 clean exit 2026-03-02 22:29:04 -06:00
Danilo Reyes
ba42689aa9 fixing download dir 2026-03-01 17:45:58 -06:00
Danilo Reyes
2a55d92f19 bugfix 2026-03-01 17:42:46 -06:00
Danilo Reyes
949f5a94c3 gallery-clean + autocompletions 2026-03-01 17:35:05 -06:00
Danilo Reyes
899543309f download 3.0 2026-03-01 00:22:56 -06:00
Danilo Reyes
5000304a8a fixed requires-revision 2026-03-01 00:22:11 -06:00
Danilo Reyes
76e3d72643 bugfix 2026-02-28 23:59:08 -06:00
Danilo Reyes
e73b4c8083 -sa to gallery/comic 2026-02-28 23:52:48 -06:00
Danilo Reyes
3f44f710b1 fix // and other logic flaws 2026-02-28 23:47:01 -06:00
Danilo Reyes
9da87b68e9 revision logic revisited 2026-02-28 23:33:06 -06:00
Danilo Reyes
766eca4a2f enable renamed links 2026-02-28 23:19:53 -06:00
Danilo Reyes
bda8105928 fix list download admin 2026-02-28 23:01:36 -06:00
Danilo Reyes
45b78ce76a logs display 2026-02-28 22:58:56 -06:00
Danilo Reyes
88e4ac04df lowered error rate 2026-02-28 22:46:59 -06:00
Danilo Reyes
7aab65a73a fzf into download 2026-02-28 22:20:11 -06:00
Danilo Reyes
adab652feb error logic to cancel / disable link 2026-02-28 22:05:01 -06:00
13 changed files with 813 additions and 59 deletions

View File

@@ -7,11 +7,12 @@
gallery-dl,
ffmpeg,
webcomix,
fzf,
...
}:
let
pname = "download";
version = "2.6";
version = "3.0";
in
buildPythonApplication {
inherit pname version;
@@ -32,5 +33,13 @@ buildPythonApplication {
types-pyyaml
yt-dlp
webcomix
fzf
];
postInstall = ''
install -Dm644 completions/download.bash \
$out/share/bash-completion/completions/download
install -Dm644 completions/download.bash \
$out/share/bash-completion/completions/download-admin
'';
}

View File

@@ -14,6 +14,8 @@ from admin_links import cmd_remove
from admin_links import cmd_rename
from admin_links import cmd_unban
from admin_links import cmd_validate_import
from admin_links import cmd_fix_revision
from admin_links import cmd_fix_x_media
from admin_users import cmd_user_rename
from admin_users import cmd_users
@@ -30,40 +32,41 @@ def build_parser() -> argparse.ArgumentParser:
p_disable = sub.add_parser("disable")
p_disable.add_argument("user")
p_disable.add_argument("url")
p_disable.add_argument("url", nargs="?")
p_disable.set_defaults(func=cmd_disable)
p_enable = sub.add_parser("enable")
p_enable.add_argument("user")
p_enable.add_argument("url")
p_enable.add_argument("url", nargs="?")
p_enable.set_defaults(func=cmd_enable)
p_ban = sub.add_parser("ban")
p_ban.add_argument("user")
p_ban.add_argument("url")
p_ban.add_argument("url", nargs="?")
p_ban.add_argument("--reason")
p_ban.set_defaults(func=cmd_ban)
p_unban = sub.add_parser("unban")
p_unban.add_argument("user")
p_unban.add_argument("url")
p_unban.add_argument("url", nargs="?")
p_unban.set_defaults(func=cmd_unban)
p_remove = sub.add_parser("remove")
p_remove.add_argument("user")
p_remove.add_argument("url")
p_remove.add_argument("url", nargs="?")
p_remove.set_defaults(func=cmd_remove)
p_rename = sub.add_parser("rename")
p_rename.add_argument("user")
p_rename.add_argument("old_url")
p_rename.add_argument("new_url")
p_rename.add_argument("old_url", nargs="?")
p_rename.add_argument("new_url", nargs="?")
p_rename.set_defaults(func=cmd_rename)
p_list = sub.add_parser("list")
p_list.add_argument("--user", action="append")
p_list.add_argument("--disabled", action="store_true")
p_list.add_argument("--banned", action="store_true")
p_list.add_argument("--requires-revision", action="store_true")
p_list.set_defaults(func=cmd_list)
p_users = sub.add_parser("users")
@@ -75,6 +78,12 @@ def build_parser() -> argparse.ArgumentParser:
p_validate = sub.add_parser("validate-import")
p_validate.set_defaults(func=cmd_validate_import)
p_fix_rev = sub.add_parser("fix-revision")
p_fix_rev.set_defaults(func=cmd_fix_revision)
p_fix_media = sub.add_parser("fix-x-media")
p_fix_media.set_defaults(func=cmd_fix_x_media)
p_user_rename = sub.add_parser("user-rename")
p_user_rename.add_argument("user")
p_user_rename.add_argument("site")

View File

@@ -3,6 +3,8 @@
from __future__ import annotations
import argparse
import shutil
import subprocess
from pathlib import Path
import db
@@ -59,48 +61,54 @@ def cmd_add(args: argparse.Namespace) -> None:
def cmd_disable(args: argparse.Namespace) -> None:
with db.connect() as conn:
ok = db.set_enabled(conn, args.user, args.url, enabled=False)
if ok:
conn.commit()
print("ok" if ok else "not found")
_apply_to_links(
args,
lambda conn, user, url: db.set_enabled(conn, user, url, enabled=False),
selector_filter="disable",
)
def cmd_enable(args: argparse.Namespace) -> None:
with db.connect() as conn:
ok = db.set_enabled(conn, args.user, args.url, enabled=True)
if ok:
conn.commit()
print("ok" if ok else "not found")
_apply_to_links(
args,
lambda conn, user, url: db.set_enabled(conn, user, url, enabled=True),
selector_filter="enable",
)
def cmd_ban(args: argparse.Namespace) -> None:
with db.connect() as conn:
ok = db.set_banned(conn, args.user, args.url, banned=True, reason=args.reason)
if ok:
conn.commit()
print("ok" if ok else "not found")
_apply_to_links(
args,
lambda conn, user, url: db.set_banned(
conn, user, url, banned=True, reason=args.reason
),
selector_filter="ban",
)
def cmd_unban(args: argparse.Namespace) -> None:
with db.connect() as conn:
ok = db.set_banned(conn, args.user, args.url, banned=False)
if ok:
conn.commit()
print("ok" if ok else "not found")
_apply_to_links(
args,
lambda conn, user, url: db.set_banned(conn, user, url, banned=False),
selector_filter="unban",
)
def cmd_remove(args: argparse.Namespace) -> None:
with db.connect() as conn:
ok = db.remove_link(conn, args.user, args.url)
if ok:
conn.commit()
print("ok" if ok else "not found")
_apply_to_links(args, lambda conn, user, url: db.remove_link(conn, user, url), "any")
def cmd_rename(args: argparse.Namespace) -> None:
old_url = args.old_url
if not old_url:
selection = _select_links(args.user, multi=False, selector_filter="any")
if not selection:
print("not found")
return
old_url = selection[0]
new_url = args.new_url or input("New URL: ").strip()
with db.connect() as conn:
result = db.rename_link(conn, args.user, args.old_url, args.new_url)
result = db.rename_link(conn, args.user, old_url, new_url)
if result["status"] == "renamed":
conn.commit()
print(result["status"])
@@ -108,14 +116,21 @@ def cmd_rename(args: argparse.Namespace) -> None:
def cmd_list(args: argparse.Namespace) -> None:
users = args.user or None
include_disabled = args.disabled or args.requires_revision
include_banned = args.banned or args.requires_revision
with db.connect() as conn:
rows = db.get_links(
conn,
users=users,
include_disabled=args.disabled,
include_banned=args.banned,
include_disabled=include_disabled,
include_banned=include_banned,
requires_revision_only=args.requires_revision,
)
for row in rows:
if args.disabled and row["enabled"]:
continue
if args.banned and not row["banned_at"]:
continue
status = "enabled" if row["enabled"] else "disabled"
if row["banned_at"]:
status = "banned"
@@ -190,3 +205,114 @@ def cmd_validate_import(_: argparse.Namespace) -> None:
[missing_enabled, missing_disabled, extra_enabled, extra_disabled]
):
print(" OK")
def cmd_fix_revision(_: argparse.Namespace) -> None:
    """Clear stale ``requires_revision`` flags.

    A link flagged by ``mark_requires_revision`` is always disabled and is
    never banned by that path, so the flag is *stale* only when the link is
    enabled again or has since been banned (ban supersedes revision).
    The previous condition used ``banned_at IS NULL``, which wiped the flag
    from every disabled non-banned link -- exactly the rows that legitimately
    require revision -- leaving the flag only on banned links.
    """
    with db.connect() as conn:
        conn.execute(
            """
            UPDATE links
            SET requires_revision = 0
            WHERE enabled = 1 OR banned_at IS NOT NULL
            """
        )
        conn.commit()
    print("ok")
def cmd_fix_x_media(_: argparse.Namespace) -> None:
    """Repair x.com links stored with a double slash before ``media``.

    Rewrites ``.../x.com/user//media`` URLs to ``.../x.com/user/media``.
    When another row for the same user already holds the corrected URL,
    the malformed row is deleted instead of updated (avoids violating the
    unique (user_name, url_normalized) index).
    """
    with db.connect() as conn:
        rows = conn.execute(
            """
            SELECT id, user_name, url_original FROM links
            WHERE url_original LIKE '%x.com/%//media%'
            """
        ).fetchall()
        for row in rows:
            # Collapse the doubled slash and recompute the normalized form.
            fixed = row["url_original"].replace("//media", "/media")
            norm = db.normalize_url(fixed)
            conflict = conn.execute(
                """
                SELECT id FROM links
                WHERE user_name = ? AND url_normalized = ? AND id != ?
                """,
                (row["user_name"], norm, row["id"]),
            ).fetchone()
            if conflict:
                # Corrected URL already exists for this user -> drop the duplicate.
                conn.execute("DELETE FROM links WHERE id = ?", (row["id"],))
                continue
            conn.execute(
                """
                UPDATE links
                SET url_original = ?, url_normalized = ?, updated_at = CURRENT_TIMESTAMP
                WHERE id = ?
                """,
                (fixed, norm, row["id"]),
            )
        conn.commit()
    print("ok")
def _fzf_select(lines: list[str], multi: bool) -> list[str]:
if not lines:
return []
if shutil.which("fzf") is None:
print("fzf not found.")
return []
args = ["fzf"]
if multi:
args.append("--multi")
proc = subprocess.run(
args,
input="\n".join(lines),
text=True,
capture_output=True,
check=False,
)
if proc.returncode != 0:
return []
return [ln for ln in proc.stdout.splitlines() if ln.strip()]
def _select_links(user: str, multi: bool, selector_filter: str) -> list[str]:
    """Interactively pick link URLs belonging to *user* via fzf.

    *selector_filter* hides links the action could not apply to:
    "enable" hides already-enabled links, "disable" hides disabled ones,
    "ban"/"unban" likewise for banned state; "any" shows everything.
    """
    with db.connect() as conn:
        rows = db.get_links(
            conn, users=[user], include_disabled=True, include_banned=True
        )
    candidates: list[str] = []
    for row in rows:
        is_enabled = bool(row["enabled"])
        is_banned = bool(row["banned_at"])
        hide = (
            (selector_filter == "enable" and is_enabled)
            or (selector_filter == "disable" and not is_enabled)
            or (selector_filter == "ban" and is_banned)
            or (selector_filter == "unban" and not is_banned)
        )
        if not hide:
            candidates.append(row["url_original"])
    return _fzf_select(candidates, multi=multi)
def _apply_to_links(args: argparse.Namespace, fn, selector_filter: str) -> None:
    """Run *fn(conn, user, url)* on an explicit URL or on fzf-picked links.

    With ``args.url`` set, applies to that single link and prints
    "ok"/"not found". Otherwise offers a multi-select via fzf and prints
    "ok (<count>)" with the number of links actually changed.
    """
    if args.url:
        with db.connect() as conn:
            success = fn(conn, args.user, args.url)
            if success:
                conn.commit()
            print("ok" if success else "not found")
        return
    chosen = _select_links(args.user, multi=True, selector_filter=selector_filter)
    if not chosen:
        print("not found")
        return
    with db.connect() as conn:
        applied = 0
        for url in chosen:
            if fn(conn, args.user, url):
                applied += 1
        if applied:
            conn.commit()
        print(f"ok ({applied})")

View File

@@ -72,5 +72,5 @@ class Gallery:
LOG.debug(command)
self.command = command
def run_command(self, verbose: bool):
run(self.command, verbose)
def run_command(self, verbose: bool, on_line=None, log_failure: bool = True):
run(self.command, verbose, on_line=on_line, log_failure=log_failure)

View File

@@ -62,6 +62,11 @@ class User:
for lst in filter(lambda x: not self.lists[x].is_file(), ["master", "push"]):
self.lists[lst].touch()
for lst in filter(
lambda x: not self.lists[x].is_file(),
["instagram", "kemono", "main"],
):
self.lists[lst].touch()
def append_list(self, name: str, line: str) -> None:
"""Appends a line into the given list"""

View File

@@ -0,0 +1,102 @@
# Bash completion for download and download-admin.
# Source this file or install it in your bash_completion.d directory.
# Emit the space-separated list of configured user names for completion.
# Reads ~/.config/jawz/config.yaml via an inline Python helper; prints an
# empty string (and exits 0) when PyYAML or the config file is unavailable,
# so completion degrades gracefully.
__download_users() {
    python3 - <<'PY' 2>/dev/null
import pathlib
try:
    import yaml
except Exception:
    print("")
    raise SystemExit(0)
cfg = pathlib.Path("~/.config/jawz/config.yaml").expanduser()
if not cfg.is_file():
    print("")
    raise SystemExit(0)
data = yaml.safe_load(cfg.read_text(encoding="utf-8")) or {}
users = [u.get("name") for u in data.get("users", []) if isinstance(u, dict)]
print(" ".join([u for u in users if u]))
PY
}
# Completion for the `download` command: option flags, per-option argument
# values (user names, post types), and the scrapper subcommand when none
# has been typed yet.
_download() {
    local cur prev words cword
    _init_completion -n : || return
    local scrappers="push main instagram kemono comic manga webcomic"
    local opts="-u --user -i --input -l --list -a --no-archive -s --no_skip -v --verbose -t --type-post"
    local post_types="posts reels stories highlights avatar"
    # Current word starts with a dash -> complete option names.
    if [[ "$cur" == -* ]]; then
        COMPREPLY=( $(compgen -W "$opts" -- "$cur") )
        return
    fi
    # Complete the argument of the option right before the cursor.
    case "$prev" in
        -u|--user)
            COMPREPLY=( $(compgen -W "$(__download_users)" -- "$cur") )
            return
            ;;
        -t|--type-post)
            COMPREPLY=( $(compgen -W "$post_types" -- "$cur") )
            return
            ;;
        -i|--input)
            # URLs follow -i; nothing sensible to complete.
            return
            ;;
    esac
    # Offer scrapper names only when no non-option word already names one.
    local have_scrapper=0
    local w
    for w in "${words[@]:1}"; do
        [[ "$w" == -* ]] && continue
        if [[ " $scrappers " == *" $w "* ]]; then
            have_scrapper=1
            break
        fi
    done
    if [[ $have_scrapper -eq 0 ]]; then
        COMPREPLY=( $(compgen -W "$scrappers" -- "$cur") )
    fi
}
# Completion for the `download-admin` command: subcommand names first,
# `list`-specific options after a dash, and user names as the first
# argument of user-scoped subcommands.
_download_admin() {
    local cur prev words cword
    _init_completion -n : || return
    local cmds="add disable enable ban unban remove rename list users import validate-import fix-revision fix-x-media user-rename"
    local list_opts="--user --disabled --banned --requires-revision"
    # Dash-prefixed word: only `list` has completable options.
    if [[ "$cur" == -* ]]; then
        if [[ "${words[1]}" == "list" ]]; then
            COMPREPLY=( $(compgen -W "$list_opts" -- "$cur") )
        else
            COMPREPLY=()
        fi
        return
    fi
    case "$prev" in
        --user)
            COMPREPLY=( $(compgen -W "$(__download_users)" -- "$cur") )
            return
            ;;
    esac
    # First word after the command: complete subcommand names.
    if [[ $cword -eq 1 ]]; then
        COMPREPLY=( $(compgen -W "$cmds" -- "$cur") )
        return
    fi
    # User-scoped subcommands take a user name as their first argument.
    case "${words[1]}" in
        add|disable|enable|ban|unban|remove|rename|user-rename)
            if [[ $cword -eq 2 ]]; then
                COMPREPLY=( $(compgen -W "$(__download_users)" -- "$cur") )
            fi
            ;;
    esac
}
complete -F _download download
complete -F _download_admin download-admin

View File

@@ -39,11 +39,14 @@ def ensure_schema(conn: sqlite3.Connection) -> None:
url_normalized TEXT NOT NULL,
site TEXT,
enabled INTEGER NOT NULL DEFAULT 1,
keep INTEGER NOT NULL DEFAULT 0,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
disabled_at TEXT,
disabled_reason TEXT,
banned_at TEXT,
banned_reason TEXT
banned_reason TEXT,
requires_revision INTEGER NOT NULL DEFAULT 0
);
CREATE UNIQUE INDEX IF NOT EXISTS links_user_url_norm
@@ -72,6 +75,31 @@ def ensure_schema(conn: sqlite3.Connection) -> None:
ON link_tombstones (user_name, url_normalized);
"""
)
_ensure_column(
conn,
"links",
"requires_revision",
"ALTER TABLE links ADD COLUMN requires_revision INTEGER NOT NULL DEFAULT 0",
)
_ensure_column(
conn,
"links",
"keep",
"ALTER TABLE links ADD COLUMN keep INTEGER NOT NULL DEFAULT 0",
)
_ensure_column(
conn,
"links",
"disabled_reason",
"ALTER TABLE links ADD COLUMN disabled_reason TEXT",
)
def _ensure_column(conn: sqlite3.Connection, table: str, column: str, ddl: str) -> None:
cols = [row[1] for row in conn.execute(f"PRAGMA table_info({table})").fetchall()]
if column in cols:
return
conn.execute(ddl)
def normalize_url(url: str) -> str:
@@ -156,6 +184,15 @@ def add_link(
""",
(user_name, url_original, url_norm, site),
)
if tombstone:
conn.execute(
"""
UPDATE links
SET requires_revision = 0
WHERE id = ?
""",
(cur.lastrowid,),
)
add_history(
conn,
user_name=user_name,
@@ -172,6 +209,7 @@ def set_enabled(
user_name: str,
url_original: str,
enabled: bool,
reason: str | None = None,
) -> bool:
url_norm = normalize_url(url_original)
row = conn.execute(
@@ -184,7 +222,11 @@ def set_enabled(
conn.execute(
"""
UPDATE links
SET enabled = 1, disabled_at = NULL, updated_at = CURRENT_TIMESTAMP
SET enabled = 1,
disabled_at = NULL,
disabled_reason = NULL,
requires_revision = 0,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
""",
(row["id"],),
@@ -194,12 +236,22 @@ def set_enabled(
conn.execute(
"""
UPDATE links
SET enabled = 0, disabled_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP
SET enabled = 0,
disabled_at = CURRENT_TIMESTAMP,
disabled_reason = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
""",
(row["id"],),
(reason, row["id"]),
)
add_history(
conn,
user_name,
"disable",
link_id=row["id"],
old_url=row["url_original"],
note=reason,
)
add_history(conn, user_name, "disable", link_id=row["id"], old_url=row["url_original"])
return True
@@ -238,7 +290,7 @@ def set_banned(
conn.execute(
"""
UPDATE links
SET banned_at = NULL, banned_reason = NULL, updated_at = CURRENT_TIMESTAMP
SET banned_at = NULL, banned_reason = NULL, requires_revision = 0, updated_at = CURRENT_TIMESTAMP
WHERE id = ?
""",
(row["id"],),
@@ -247,6 +299,74 @@ def set_banned(
return True
def mark_requires_revision(
    conn: sqlite3.Connection,
    user_name: str,
    url_original: str,
    reason: str,
) -> bool:
    """Flag every link of *user_name* matching *url_original* for revision.

    Each match is disabled (keeping any earlier disabled_at timestamp) and
    a "requires_revision" history entry is recorded with *reason*.
    Returns False when no link matched. Caller commits.
    """
    url_norm = normalize_url(url_original)
    matches = conn.execute(
        "SELECT id, url_original FROM links WHERE user_name = ? AND url_normalized = ?",
        (user_name, url_norm),
    ).fetchall()
    if not matches:
        return False
    update_sql = """
        UPDATE links
        SET requires_revision = 1,
            enabled = 0,
            disabled_at = COALESCE(disabled_at, CURRENT_TIMESTAMP),
            updated_at = CURRENT_TIMESTAMP
        WHERE id = ?
    """
    for match in matches:
        conn.execute(update_sql, (match["id"],))
        add_history(
            conn,
            user_name,
            "requires_revision",
            link_id=match["id"],
            old_url=match["url_original"],
            note=reason,
        )
    return True
def mark_requires_revision_by_norm(
    conn: sqlite3.Connection, url_norm: str, reason: str
) -> int:
    """Flag every link (any user) with normalized URL *url_norm* for revision.

    Same state change as mark_requires_revision, but keyed purely on the
    normalized URL. Returns the number of links flagged. Caller commits.
    """
    hits = conn.execute(
        "SELECT id, user_name, url_original FROM links WHERE url_normalized = ?",
        (url_norm,),
    ).fetchall()
    update_sql = """
        UPDATE links
        SET requires_revision = 1,
            enabled = 0,
            disabled_at = COALESCE(disabled_at, CURRENT_TIMESTAMP),
            updated_at = CURRENT_TIMESTAMP
        WHERE id = ?
    """
    for hit in hits:
        conn.execute(update_sql, (hit["id"],))
        add_history(
            conn,
            hit["user_name"],
            "requires_revision",
            link_id=hit["id"],
            old_url=hit["url_original"],
            note=reason,
        )
    return len(hits)
def rename_link(
conn: sqlite3.Connection,
user_name: str,
@@ -278,6 +398,14 @@ def rename_link(
""",
(new_url, new_norm, get_site(new_url), row["id"]),
)
conn.execute(
"""
UPDATE links
SET enabled = 1, disabled_at = NULL, requires_revision = 0
WHERE id = ?
""",
(row["id"],),
)
add_history(
conn,
user_name,
@@ -329,6 +457,7 @@ def get_links(
users: Iterable[str] | None = None,
include_disabled: bool = False,
include_banned: bool = False,
requires_revision_only: bool = False,
) -> list[sqlite3.Row]:
params: list = []
where = []
@@ -340,12 +469,88 @@ def get_links(
where.append("enabled = 1")
if not include_banned:
where.append("banned_at IS NULL")
if requires_revision_only:
where.append("requires_revision = 1")
clause = " AND ".join(where)
if clause:
clause = "WHERE " + clause
return conn.execute(f"SELECT * FROM links {clause} ORDER BY user_name, id", params).fetchall()
def get_links_for_cleaning(
    conn: sqlite3.Connection,
    users: Iterable[str] | None = None,
) -> list[sqlite3.Row]:
    """Return x.com links eligible for interactive cleaning.

    Eligible means: enabled, not banned, and not marked keep. When *users*
    is given, results are restricted to those user names. Ordered by
    user_name then id.
    """
    conditions = ["site = ?", "enabled = 1", "banned_at IS NULL", "keep = 0"]
    params: list = ["x.com"]
    selected = list(users) if users else []
    if selected:
        placeholders = ",".join("?" * len(selected))
        conditions.append(f"user_name IN ({placeholders})")
        params.extend(selected)
    query = (
        "SELECT * FROM links WHERE "
        + " AND ".join(conditions)
        + " ORDER BY user_name, id"
    )
    return conn.execute(query, params).fetchall()
def set_keep(
    conn: sqlite3.Connection,
    user_name: str,
    url_original: str,
    keep: bool,
    reason: str | None = None,
) -> bool:
    """Set or clear the ``keep`` flag on a user's link.

    Records a "keep"/"unkeep" history entry with the optional *reason*.
    Returns False when no matching link exists. Caller commits.

    The two original branches were identical except for the stored value
    and the history action name, so they are folded into one parameterized
    update.
    """
    url_norm = normalize_url(url_original)
    row = conn.execute(
        "SELECT id, url_original FROM links WHERE user_name = ? AND url_normalized = ?",
        (user_name, url_norm),
    ).fetchone()
    if not row:
        return False
    conn.execute(
        """
        UPDATE links
        SET keep = ?, updated_at = CURRENT_TIMESTAMP
        WHERE id = ?
        """,
        (1 if keep else 0, row["id"]),
    )
    add_history(
        conn,
        user_name,
        "keep" if keep else "unkeep",
        link_id=row["id"],
        old_url=row["url_original"],
        note=reason,
    )
    return True
def get_links_by_user(conn: sqlite3.Connection, user_name: str) -> list[sqlite3.Row]:
return conn.execute(
"SELECT * FROM links WHERE user_name = ? ORDER BY id",

View File

@@ -73,15 +73,23 @@ def get_index(name: str) -> int:
def parse_gallery(gdl_list: str, user: User) -> None:
"""Processes the gallery-dl command based on the selected gallery"""
args = get_args()
gallery = Gallery()
gallery.archive = args.flag_archive
gallery.skip_arg = " -o skip=true" if not args.flag_skip else ""
gallery.dest = "download"
gallery.list = gdl_list
gallery.opt_args = parse_instagram(gdl_list)
list_path = user.lists[gdl_list]
if not list_path.is_file():
LOG.warning("List file missing: %s", list_path)
return
with open(list_path, "r", encoding="utf-8") as r_file:
links = list(map(lambda x: x.rstrip(), r_file))
for link in filter(None, links):
gallery = Gallery()
gallery.archive = args.flag_archive
gallery.skip_arg = " -o skip=true" if not args.flag_skip else ""
gallery.dest = "download"
gallery.link = link
gallery.opt_args = parse_instagram(link)
gallery.generate_command(user)
gallery.run_command(args.flag_verbose)
gallery.generate_command(user)
handler = _make_gallery_error_handler(link)
gallery.run_command(args.flag_verbose, on_line=handler, log_failure=False)
def parse_instagram(link: str, post_type: list[str] | str | None = None) -> list[str]:
@@ -95,6 +103,48 @@ def parse_instagram(link: str, post_type: list[str] | str | None = None) -> list
return ["-o", f"include={use_type}"]
# Error strings that indicate the linked account/timeline is gone or renamed,
# so the link should be flagged for human revision.
# NOTE(review): membership below is an EXACT match against the text after
# "[error]", so "No results for" here only fires when the whole reason equals
# that prefix; the substring case is handled separately at the end of
# handle() -- confirm this split is intended.
REVISION_ERRORS = {
    "NotFoundError: Requested user could not be found",
    "Unable to retrieve Tweets from this timeline",
    "No results for",
}

# Substrings of error reasons treated as temporary (rate limiting, network
# trouble); such links are logged but left untouched.
TRANSIENT_ERRORS = {
    "User input required (password)",
    "429",
    "rate limit",
    "timed out",
    "timeout",
    "Network",
    "connection",
}


def _make_gallery_error_handler(link: str):
    """Build a per-line output callback for a gallery-dl run of *link*.

    The returned handler inspects each output line: gallery-dl "[error]"
    lines whose reason matches REVISION_ERRORS (exactly) or any line
    containing "No results for" mark the link as requires_revision in the
    DB; TRANSIENT_ERRORS matches are only logged.
    """
    # Normalize once; the DB lookup is by normalized URL, possibly across users.
    norm = db.normalize_url(link)

    def handle(line: str) -> None:
        if "[error]" in line:
            # Everything after the first "[error]" marker is the reason text.
            reason = line.split("[error]", 1)[1].strip()
            LOG.warning("Error for %s: %s", link, reason)
            if reason in REVISION_ERRORS:
                with db.connect() as conn:
                    db.mark_requires_revision_by_norm(conn, norm, reason)
                    conn.commit()
                LOG.warning("Marked requires_revision for %s", link)
            if any(tok in reason for tok in TRANSIENT_ERRORS):
                LOG.warning("Transient error for %s: %s", link, reason)
            return
        if "No results for" in line:
            with db.connect() as conn:
                db.mark_requires_revision_by_norm(conn, norm, "No results for")
                conn.commit()
            LOG.warning("Marked requires_revision for %s", link)
            return

    return handle
def _comic_skip_arg(link: str, flag_skip: bool) -> str:
if not flag_skip:
return ""
@@ -122,7 +172,8 @@ def _handle_gallery_link(user: User, link: str, args, conn) -> None:
gallery.dest = "download"
gallery.opt_args = parse_instagram(link)
gallery.generate_command(user)
gallery.run_command(args.flag_verbose)
handler = _make_gallery_error_handler(link)
gallery.run_command(args.flag_verbose, on_line=handler, log_failure=False)
def _handle_comic_link(link: str, args) -> None:
@@ -131,7 +182,8 @@ def _handle_comic_link(link: str, args) -> None:
gallery.skip_arg = _comic_skip_arg(link, args.flag_skip)
gallery.link = link
gallery.generate_command(is_comic=True)
gallery.run_command(args.flag_verbose)
handler = _make_gallery_error_handler(link)
gallery.run_command(args.flag_verbose, on_line=handler, log_failure=False)
save_comic(link)
@@ -152,7 +204,8 @@ def _handle_other_link(user: User, link: str, args) -> None:
gallery.link = link
gallery.dest = "push"
gallery.generate_command(user)
gallery.run_command(args.flag_verbose)
handler = _make_gallery_error_handler(link)
gallery.run_command(args.flag_verbose, on_line=handler, log_failure=False)
def video_command(video: Video):

View File

@@ -32,7 +32,7 @@ def validate_x_link(line: str) -> str:
if re.search(r"\/media$", line):
return line
# if does not contain /media at the end then add /media
return f"{line}/media"
return f"{line.rstrip('/')}/media"
def parse_link(link: str) -> str:
@@ -66,6 +66,8 @@ def run(
verbose: bool,
cwd: Path | None = None,
check: bool = False,
on_line=None,
log_failure: bool = True,
) -> None:
"""Run command in a subprocess"""
# pylint: disable=subprocess-run-check
@@ -83,9 +85,28 @@ def run(
else:
args = list(command)
result = subprocess.run(args, check=check, cwd=cwd)
if not check and result.returncode != 0:
LOG.warning("Command failed (%s): %s", result.returncode, args)
if on_line is None:
result = subprocess.run(args, check=check, cwd=cwd)
if log_failure and not check and result.returncode != 0:
LOG.warning("Command failed (%s): %s", result.returncode, args)
return
proc = subprocess.Popen(
args,
cwd=cwd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
)
assert proc.stdout is not None
for line in proc.stdout:
print(line, end="")
on_line(line)
returncode = proc.wait()
if check and returncode != 0:
raise subprocess.CalledProcessError(returncode, args)
if log_failure and not check and returncode != 0:
LOG.warning("Command failed (%s): %s", returncode, args)
def list_lines(i: int, line: str) -> str:

View File

@@ -0,0 +1,144 @@
#!/usr/bin/env python3
"""Interactive cleaner for x.com galleries."""
from __future__ import annotations
import argparse
import shutil
import subprocess
from pathlib import Path
from urllib.parse import urlsplit
import db
from classes.user import User
from functions import load_config_variables
def _extract_handle(url: str) -> str | None:
parts = urlsplit(url if "://" in url else f"https://{url}")
segments = [seg for seg in parts.path.split("/") if seg]
if not segments:
return None
return segments[0]
def _resolve_folder(user: User, handle: str | None) -> Path | None:
base = user.directories.get("download")
if base is None:
return None
if not base.exists():
return None
if not handle:
return base
candidates = [
base / handle,
]
for cand in candidates:
if cand.exists():
return cand
return None
def _open_folder(path: Path) -> None:
    """Open *path* with the desktop's xdg-open, if that tool is available."""
    opener = shutil.which("xdg-open")
    if opener is None:
        print("xdg-open not found; skipping folder open.")
        return
    subprocess.run(["xdg-open", str(path)], check=False)
def _prompt() -> str:
    """Ask the operator what to do with the current link; lowercased answer."""
    answer = input("Keep? [y] keep / [n] disable / [s] skip / [q] quit: ")
    return answer.strip().lower()
def _build_user_index(configs: dict) -> dict[str, int]:
return {entry["name"]: idx for idx, entry in enumerate(configs["users"])}
def _validate_users(user_index: dict[str, int], users: list[str] | None) -> bool:
if not users:
return True
unknown = [u for u in users if u not in user_index]
if not unknown:
return True
print(f"Unknown users: {', '.join(unknown)}")
return False
def _print_context(user_name: str, url: str, handle: str | None, folder: Path | None) -> None:
print(f"\nUser: {user_name}")
print(f"URL: {url}")
if handle:
print(f"Handle: {handle}")
if folder:
print(f"Folder: {folder}")
_open_folder(folder)
return
print("Folder: <unknown>")
def _apply_choice(
conn,
user_name: str,
url: str,
choice: str,
reason: str,
) -> bool | None:
if choice in ("y", "yes"):
ok = db.set_keep(conn, user_name, url, keep=True, reason=reason)
if ok:
conn.commit()
return True
if choice in ("n", "no"):
ok = db.set_enabled(conn, user_name, url, enabled=False, reason=reason)
if ok:
conn.commit()
return True
if choice in ("s", "skip", ""):
return True
if choice in ("q", "quit"):
return None
print("Please enter y, n, s, or q.")
return False
def main() -> None:
    """Interactively review x.com links: show each, open its folder, then
    keep / disable / skip per operator input."""
    parser = argparse.ArgumentParser(prog="gallery-clean")
    parser.add_argument(
        "session",
        nargs="?",
        type=int,
        default=10,
        help="Number of links to review this session (default: 10)",
    )
    parser.add_argument(
        "--reason",
        default="gallery-clean",
        help="Reason stored when disabling or keeping",
    )
    args = parser.parse_args()
    configs = load_config_variables()
    user_index = _build_user_index(configs)
    # Hard-coded to the primary account for now.
    users_filter = ["jawz"]
    if not _validate_users(user_index, users_filter):
        return
    # NOTE(review): other modules call db.connect() with no argument;
    # confirm connect() accepts a config mapping here.
    with db.connect(configs) as conn:
        rows = db.get_links_for_cleaning(conn, users=users_filter)
        # max(..., 0) guards against a negative session count.
        for row in rows[: max(args.session, 0)]:
            user_name = row["user_name"]
            url = row["url_original"]
            handle = _extract_handle(url)
            folder = _resolve_folder(User(user_index[user_name]), handle)
            _print_context(user_name, url, handle, folder)
            while True:
                result = _apply_choice(conn, user_name, url, _prompt(), args.reason)
                if result is None:
                    # Operator chose quit.
                    return
                if result:
                    break

View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
"""fzf-based selectors for comic and gallery links."""
from __future__ import annotations
import argparse
import re
import subprocess
import db
USER = "jawz"
RGX_COMIC = re.compile("readcomiconline|mangahere|mangadex|webtoons|manganato")
def _select_links(urls: list[str]) -> list[str]:
if not urls:
return []
proc = subprocess.run(
["fzf", "--multi", "--exact", "-i"],
input="\n".join(urls),
text=True,
capture_output=True,
check=False,
)
if proc.returncode != 0:
return []
return [ln for ln in proc.stdout.splitlines() if ln.strip()]
def _run_download(selected: list[str], extra_args: list[str]) -> None:
if not selected:
return
subprocess.run(["download", "-u", USER, *extra_args, "-i", *selected], check=False)
def _parse_args() -> argparse.Namespace:
    """Parse the shared -s/--no_skip and -a/--no-archive toggles.

    Both default to True and are switched off when the flag is passed.
    """
    parser = argparse.ArgumentParser(add_help=False)
    for short, long_opt, dest in (
        ("-s", "--no_skip", "flag_skip"),
        ("-a", "--no-archive", "flag_archive"),
    ):
        parser.add_argument(short, long_opt, dest=dest, action="store_false")
    return parser.parse_args()
def _extra_args_from_flags(args: argparse.Namespace) -> list[str]:
extra = []
if args.flag_skip is False:
extra.append("-s")
if args.flag_archive is False:
extra.append("-a")
return extra
def comic_main() -> None:
    """fzf-select comic links for USER and pass them to the download CLI."""
    flags = _parse_args()
    extra = _extra_args_from_flags(flags)
    with db.connect() as conn:
        rows = db.get_links(
            conn, users=[USER], include_disabled=False, include_banned=False
        )
    comic_urls = [
        row["url_original"] for row in rows if RGX_COMIC.search(row["url_original"])
    ]
    _run_download(_select_links(comic_urls), extra)
def gallery_main() -> None:
    """fzf-select non-comic gallery links for USER and pass them to download."""
    flags = _parse_args()
    extra = _extra_args_from_flags(flags)
    with db.connect() as conn:
        rows = db.get_links(
            conn, users=[USER], include_disabled=False, include_banned=False
        )
    gallery_urls = [
        row["url_original"]
        for row in rows
        if not RGX_COMIC.search(row["url_original"])
    ]
    _run_download(_select_links(gallery_urls), extra)
if __name__ == "__main__":
gallery_main()

View File

@@ -10,6 +10,8 @@ py_modules =
admin
admin_links
admin_users
select_links
gallery_clean
classes.gallery
classes.user
@@ -17,3 +19,6 @@ py_modules =
console_scripts =
download = download:main
download-admin = admin:main
comic = select_links:comic_main
gallery = select_links:gallery_main
gallery-clean = gallery_clean:main

View File

@@ -38,6 +38,7 @@ class TestDownload(unittest.TestCase):
self.orig_db_connect = download.db.connect
self.orig_db_add_link = download.db.add_link
self.orig_save_comic = download.save_comic
self.orig_make_handler = download._make_gallery_error_handler
def tearDown(self) -> None:
download.Gallery = self.orig_gallery
@@ -46,6 +47,7 @@ class TestDownload(unittest.TestCase):
download.db.connect = self.orig_db_connect
download.db.add_link = self.orig_db_add_link
download.save_comic = self.orig_save_comic
download._make_gallery_error_handler = self.orig_make_handler
def test_parse_instagram(self):
res = download.parse_instagram("https://instagram.com/user")
@@ -101,6 +103,7 @@ class TestDownload(unittest.TestCase):
download.video_command = fake_video_command
download.run = lambda *args, **kwargs: None
download.save_comic = lambda *_args, **_kwargs: None
download._make_gallery_error_handler = lambda *_args, **_kwargs: None
links = [
"https://x.com/someuser",