Danilo Reyes
2025-12-28 20:59:09 -06:00
commit 96fcc2b9e8
35 changed files with 2603 additions and 0 deletions

115
backend/alembic.ini Normal file

@@ -0,0 +1,115 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# placeholder only: env.py overrides this at runtime with the URL built in app.core.config
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
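
For reference, the migrations this file configures can also be driven from Python instead of the CLI; a minimal sketch using Alembic's command API (the "alembic.ini" path assumes the process runs from backend/):

from alembic import command
from alembic.config import Config

# Load the configuration shown above
cfg = Config("alembic.ini")
# Apply every pending revision, equivalent to `alembic upgrade head` in run.sh
command.upgrade(cfg, "head")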

87
backend/alembic/env.py Normal file

@@ -0,0 +1,87 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
import os
import sys
# Add parent directory to path
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from app.core.config import settings
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging;
# this configures the loggers declared in alembic.ini.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Set the SQLAlchemy URL from our settings, swapping in the
# "postgresql+psycopg" prefix so SQLAlchemy uses the psycopg 3 driver
config.set_main_option(
    "sqlalchemy.url",
    settings.database_url.replace("postgresql://", "postgresql+psycopg://"),
)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
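
target_metadata is left as None here, so `alembic revision --autogenerate` cannot diff models against the database; the schema is written by hand in the versions/ directory instead. If SQLAlchemy models were added later, wiring them in would be the usual one-liner, sketched here (app.models and its Base are hypothetical, not part of this commit):

# from app.models import Base  # hypothetical declarative base
# target_metadata = Base.metadata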

25
backend/alembic/script.py.mako Normal file

@@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}


@@ -0,0 +1,133 @@
"""Initial schema
Revision ID: 001
Revises:
Create Date: 2024-01-01 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create schema
op.execute("CREATE SCHEMA IF NOT EXISTS moviemap")
# Create enums
op.execute("""
DO $$ BEGIN
CREATE TYPE moviemap.source_kind AS ENUM ('radarr', 'sonarr', 'lidarr');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
""")
op.execute("""
DO $$ BEGIN
CREATE TYPE moviemap.media_type AS ENUM ('movie', 'show', 'music');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
""")
op.execute("""
DO $$ BEGIN
CREATE TYPE moviemap.watched_media_type AS ENUM ('movie', 'show');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
""")
# Create source table
op.execute("""
CREATE TABLE IF NOT EXISTS moviemap.source (
id SERIAL PRIMARY KEY,
kind moviemap.source_kind NOT NULL UNIQUE,
base_url TEXT NOT NULL,
enabled BOOLEAN NOT NULL DEFAULT true,
last_sync_at TIMESTAMPTZ
)
""")
    # Create media_item table (gen_random_uuid() is built in on PostgreSQL 13+;
    # older servers need the pgcrypto extension)
op.execute("""
CREATE TABLE IF NOT EXISTS moviemap.media_item (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
source_kind moviemap.source_kind NOT NULL,
source_item_id INTEGER NOT NULL,
title TEXT NOT NULL,
year INTEGER,
media_type moviemap.media_type NOT NULL,
arr_raw JSONB,
UNIQUE (source_kind, source_item_id)
)
""")
# Create media_country table
op.execute("""
CREATE TABLE IF NOT EXISTS moviemap.media_country (
media_item_id UUID NOT NULL REFERENCES moviemap.media_item(id) ON DELETE CASCADE,
country_code CHAR(2) NOT NULL,
weight SMALLINT NOT NULL DEFAULT 1,
PRIMARY KEY (media_item_id, country_code)
)
""")
# Create watched_item table
op.execute("""
CREATE TABLE IF NOT EXISTS moviemap.watched_item (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
media_type moviemap.watched_media_type NOT NULL,
title TEXT NOT NULL,
year INTEGER,
country_code CHAR(2) NOT NULL,
watched_at TIMESTAMPTZ,
notes TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
)
""")
# Create manual_pin table
op.execute("""
CREATE TABLE IF NOT EXISTS moviemap.manual_pin (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
country_code CHAR(2) NOT NULL,
label TEXT,
pinned_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
)
""")
# Create indexes
op.execute("CREATE INDEX IF NOT EXISTS idx_media_item_source ON moviemap.media_item (source_kind, source_item_id)")
op.execute("CREATE INDEX IF NOT EXISTS idx_media_country_code ON moviemap.media_country (country_code)")
op.execute("CREATE INDEX IF NOT EXISTS idx_watched_country ON moviemap.watched_item (country_code)")
op.execute("CREATE INDEX IF NOT EXISTS idx_watched_media_type ON moviemap.watched_item (media_type)")
op.execute("CREATE INDEX IF NOT EXISTS idx_pin_country ON moviemap.manual_pin (country_code)")
def downgrade() -> None:
op.execute("DROP INDEX IF EXISTS moviemap.idx_pin_country")
op.execute("DROP INDEX IF EXISTS moviemap.idx_watched_media_type")
op.execute("DROP INDEX IF EXISTS moviemap.idx_watched_country")
op.execute("DROP INDEX IF EXISTS moviemap.idx_media_country_code")
op.execute("DROP INDEX IF EXISTS moviemap.idx_media_item_source")
op.execute("DROP TABLE IF EXISTS moviemap.manual_pin")
op.execute("DROP TABLE IF EXISTS moviemap.watched_item")
op.execute("DROP TABLE IF EXISTS moviemap.media_country")
op.execute("DROP TABLE IF EXISTS moviemap.media_item")
op.execute("DROP TABLE IF EXISTS moviemap.source")
op.execute("DROP TYPE IF EXISTS moviemap.watched_media_type")
op.execute("DROP TYPE IF EXISTS moviemap.media_type")
op.execute("DROP TYPE IF EXISTS moviemap.source_kind")
op.execute("DROP SCHEMA IF EXISTS moviemap")

0
backend/app/__init__.py Normal file


35
backend/app/api/admin.py Normal file

@@ -0,0 +1,35 @@
"""Admin API endpoints"""
from fastapi import APIRouter, HTTPException, Header
from typing import Optional
import secrets
from app.core.config import settings
from app.services.sync import sync_all_arrs
router = APIRouter()
async def verify_admin_token(authorization: Optional[str] = Header(None)):
    """Verify admin token if configured"""
    if settings.admin_token:
        expected = f"Bearer {settings.admin_token}"
        # Compare in constant time so the token can't be probed via timing
        if not authorization or not secrets.compare_digest(authorization, expected):
            raise HTTPException(status_code=401, detail="Unauthorized")
    # If no admin token is configured, allow access (localhost-only deployment assumed)
@router.post("/sync")
async def trigger_sync(authorization: Optional[str] = Header(None)):
"""
Trigger sync from all *arr instances.
Requires admin token if MOVIEMAP_ADMIN_TOKEN is set.
"""
await verify_admin_token(authorization)
try:
result = await sync_all_arrs()
return {
"status": "success",
"synced": result
}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Sync failed: {str(e)}")
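
A sync can then be triggered with any HTTP client; a sketch using httpx, where the token placeholder stands in for whatever MOVIEMAP_ADMIN_TOKEN is set to (drop the header entirely if no token is configured):

import httpx

response = httpx.post(
    "http://127.0.0.1:8080/admin/sync",
    headers={"Authorization": "Bearer <MOVIEMAP_ADMIN_TOKEN>"},
    timeout=120.0,  # a full library sync can take a while
)
print(response.json())  # e.g. {"status": "success", "synced": {"radarr": 842, ...}}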

61
backend/app/api/collection.py Normal file

@@ -0,0 +1,61 @@
"""Collection API endpoints"""
from fastapi import APIRouter, Query
from typing import Optional
from app.core.database import pool
router = APIRouter()
@router.get("/summary")
async def get_collection_summary(
types: Optional[str] = Query(None, description="Comma-separated list: movie,show,music")
):
"""
Get collection summary by country and media type.
Returns counts per country per media type.
"""
# Pool should be initialized on startup, but check just in case
if not pool:
from app.core.database import init_db
await init_db()
# Parse types filter
type_filter = []
if types:
type_filter = [t.strip() for t in types.split(",") if t.strip() in ["movie", "show", "music"]]
async with pool.connection() as conn:
async with conn.cursor() as cur:
# Build query
query = """
SELECT
mc.country_code,
mi.media_type,
COUNT(*) as count
FROM moviemap.media_country mc
JOIN moviemap.media_item mi ON mc.media_item_id = mi.id
"""
params = []
            if type_filter:
                # Cast the enum to text so "= ANY(text[])" type-checks against the list parameter
                query += " WHERE mi.media_type::text = ANY(%s)"
                params.append(type_filter)
query += """
GROUP BY mc.country_code, mi.media_type
ORDER BY mc.country_code, mi.media_type
"""
await cur.execute(query, params if params else None)
rows = await cur.fetchall()
# Transform to nested dict structure
result = {}
for row in rows:
country_code, media_type, count = row
if country_code not in result:
result[country_code] = {}
result[country_code][media_type] = count
return result
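
The response nests counts as country, then media type, then count; a client-side sketch of the call and the shape it returns:

import httpx

data = httpx.get(
    "http://127.0.0.1:8080/api/collection/summary",
    params={"types": "movie,show"},  # omit to include music as well
).json()
# data looks like: {"US": {"movie": 120, "show": 45}, "KR": {"movie": 8}, ...}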

87
backend/app/api/pins.py Normal file

@@ -0,0 +1,87 @@
"""Manual pins API endpoints"""
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
from uuid import UUID
from app.core.database import pool
router = APIRouter()
class PinCreate(BaseModel):
country_code: str
label: Optional[str] = None
@router.get("")
async def list_pins():
"""List all manual pins"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = """
SELECT id, country_code, label, pinned_at
FROM moviemap.manual_pin
ORDER BY pinned_at DESC
"""
await cur.execute(query)
rows = await cur.fetchall()
pins = []
for row in rows:
pins.append({
"id": str(row[0]),
"country_code": row[1],
"label": row[2],
"pinned_at": row[3].isoformat() if row[3] else None,
})
return pins
@router.post("")
async def create_pin(pin: PinCreate):
    """Create a new manual pin"""
    # The schema stores ISO 3166-1 alpha-2 codes (CHAR(2)), so reject anything else early
    if len(pin.country_code) != 2:
        raise HTTPException(status_code=400, detail="country_code must be a 2-letter ISO code")
    # Pool should be initialized on startup
    if not pool:
        from app.core.database import init_db
        await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = """
INSERT INTO moviemap.manual_pin (country_code, label)
VALUES (%s, %s)
RETURNING id
"""
await cur.execute(query, (pin.country_code, pin.label))
result = await cur.fetchone()
await conn.commit()
return {"id": str(result[0]), "status": "created"}
@router.delete("/{pin_id}")
async def delete_pin(pin_id: UUID):
"""Delete a manual pin"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = "DELETE FROM moviemap.manual_pin WHERE id = %s RETURNING id"
await cur.execute(query, (str(pin_id),))
result = await cur.fetchone()
await conn.commit()
if not result:
raise HTTPException(status_code=404, detail="Pin not found")
return {"id": str(result[0]), "status": "deleted"}

208
backend/app/api/watched.py Normal file

@@ -0,0 +1,208 @@
"""Watched items API endpoints"""
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
from uuid import UUID
from app.core.database import pool
router = APIRouter()
class WatchedItemCreate(BaseModel):
media_type: str # "movie" or "show"
title: str
year: Optional[int] = None
country_code: str
watched_at: Optional[datetime] = None
notes: Optional[str] = None
class WatchedItemUpdate(BaseModel):
title: Optional[str] = None
year: Optional[int] = None
country_code: Optional[str] = None
watched_at: Optional[datetime] = None
notes: Optional[str] = None
@router.get("/summary")
async def get_watched_summary():
"""Get watched items summary by country"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = """
SELECT
country_code,
media_type,
COUNT(*) as count
FROM moviemap.watched_item
WHERE watched_at IS NOT NULL
GROUP BY country_code, media_type
ORDER BY country_code, media_type
"""
await cur.execute(query)
rows = await cur.fetchall()
result = {}
for row in rows:
country_code, media_type, count = row
if country_code not in result:
result[country_code] = {}
result[country_code][media_type] = count
return result
@router.get("")
async def list_watched_items():
"""List all watched items"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = """
SELECT
id, media_type, title, year, country_code,
watched_at, notes, created_at, updated_at
FROM moviemap.watched_item
ORDER BY created_at DESC
"""
await cur.execute(query)
rows = await cur.fetchall()
items = []
for row in rows:
items.append({
"id": str(row[0]),
"media_type": row[1],
"title": row[2],
"year": row[3],
"country_code": row[4],
"watched_at": row[5].isoformat() if row[5] else None,
"notes": row[6],
"created_at": row[7].isoformat() if row[7] else None,
"updated_at": row[8].isoformat() if row[8] else None,
})
return items
@router.post("")
async def create_watched_item(item: WatchedItemCreate):
"""Create a new watched item"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
    if item.media_type not in ["movie", "show"]:
        raise HTTPException(status_code=400, detail="media_type must be 'movie' or 'show'")
    if len(item.country_code) != 2:
        raise HTTPException(status_code=400, detail="country_code must be a 2-letter ISO code")
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = """
INSERT INTO moviemap.watched_item
(media_type, title, year, country_code, watched_at, notes)
VALUES (%s, %s, %s, %s, %s, %s)
RETURNING id
"""
await cur.execute(
query,
(
item.media_type,
item.title,
item.year,
item.country_code,
item.watched_at,
item.notes,
)
)
result = await cur.fetchone()
await conn.commit()
return {"id": str(result[0]), "status": "created"}
@router.patch("/{item_id}")
async def update_watched_item(item_id: UUID, item: WatchedItemUpdate):
"""Update a watched item"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
            # Build a dynamic UPDATE from only the fields that were supplied
            # (note: this PATCH cannot clear a field back to NULL)
            updates = []
            params = []
if item.title is not None:
updates.append("title = %s")
params.append(item.title)
if item.year is not None:
updates.append("year = %s")
params.append(item.year)
if item.country_code is not None:
updates.append("country_code = %s")
params.append(item.country_code)
if item.watched_at is not None:
updates.append("watched_at = %s")
params.append(item.watched_at)
if item.notes is not None:
updates.append("notes = %s")
params.append(item.notes)
if not updates:
raise HTTPException(status_code=400, detail="No fields to update")
updates.append("updated_at = NOW()")
params.append(str(item_id))
query = f"""
UPDATE moviemap.watched_item
SET {', '.join(updates)}
WHERE id = %s
RETURNING id
"""
await cur.execute(query, params)
result = await cur.fetchone()
await conn.commit()
if not result:
raise HTTPException(status_code=404, detail="Watched item not found")
return {"id": str(result[0]), "status": "updated"}
@router.delete("/{item_id}")
async def delete_watched_item(item_id: UUID):
"""Delete a watched item"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
query = "DELETE FROM moviemap.watched_item WHERE id = %s RETURNING id"
await cur.execute(query, (str(item_id),))
result = await cur.fetchone()
await conn.commit()
if not result:
raise HTTPException(status_code=404, detail="Watched item not found")
return {"id": str(result[0]), "status": "deleted"}


43
backend/app/core/config.py Normal file

@@ -0,0 +1,43 @@
"""Application configuration"""
from pydantic_settings import BaseSettings
from typing import Optional
import os
class Settings(BaseSettings):
"""Application settings"""
# Server
port: int = int(os.getenv("PORT", "8080"))
host: str = "127.0.0.1"
# Database
postgres_socket_path: str = os.getenv("POSTGRES_SOCKET_PATH", "/run/postgresql")
postgres_db: str = os.getenv("POSTGRES_DB", "jawz")
postgres_user: str = os.getenv("POSTGRES_USER", os.getenv("USER", "jawz"))
# *arr API keys
sonarr_api_key: str = os.getenv("SONARR_API_KEY", "")
radarr_api_key: str = os.getenv("RADARR_API_KEY", "")
lidarr_api_key: str = os.getenv("LIDARR_API_KEY", "")
# *arr base URLs
sonarr_url: str = "http://127.0.0.1:8989"
radarr_url: str = "http://127.0.0.1:7878"
lidarr_url: str = "http://127.0.0.1:8686"
# Admin
admin_token: Optional[str] = os.getenv("MOVIEMAP_ADMIN_TOKEN")
@property
def database_url(self) -> str:
"""Build PostgreSQL connection string using Unix socket"""
return f"postgresql://{self.postgres_user}@/{self.postgres_db}?host={self.postgres_socket_path}"
class Config:
env_file = ".env"
case_sensitive = False
settings = Settings()
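
With the defaults above, database_url resolves to a socket-based DSN, for example:

from app.core.config import settings

# Given POSTGRES_USER=jawz, POSTGRES_DB=jawz, POSTGRES_SOCKET_PATH=/run/postgresql:
print(settings.database_url)
# -> postgresql://jawz@/jawz?host=/run/postgresql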

50
backend/app/core/database.py Normal file

@@ -0,0 +1,50 @@
"""Database connection and session management"""
from psycopg import AsyncConnection
from psycopg_pool import AsyncConnectionPool
from app.core.config import settings
import logging
logger = logging.getLogger(__name__)
# Connection pool; constructed (but not opened) at import time so that
# "from app.core.database import pool" binds the same object in every module.
# init_db()/close_db() open and close it during the application lifespan.
pool: AsyncConnectionPool = AsyncConnectionPool(
    conninfo=settings.database_url,
    min_size=1,
    max_size=10,
    open=False,
)
async def init_db():
    """Open the database connection pool"""
    try:
        await pool.open()
        logger.info("Database connection pool opened")
    except Exception as e:
        logger.error(f"Failed to open database pool: {e}")
        raise
async def close_db():
    """Close the database connection pool"""
    await pool.close()
    logger.info("Database connection pool closed")
async def get_db() -> AsyncConnection:
    """Get a database connection from the pool (opening it lazily if needed)"""
    if pool.closed:
        await init_db()
    return await pool.getconn()
async def return_conn(conn: AsyncConnection):
    """Return a connection to the pool"""
    await pool.putconn(conn)
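
The routers above use pool.connection() directly, but get_db/return_conn suggest a FastAPI dependency pattern; a sketch of how that would look (db_conn is hypothetical, not part of this commit):

from fastapi import Depends
from app.core import database

async def db_conn():
    # Acquire a connection for the request and always hand it back afterwards
    conn = await database.get_db()
    try:
        yield conn
    finally:
        await database.return_conn(conn)

# usage in a route: async def endpoint(conn=Depends(db_conn)): ...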


296
backend/app/services/sync.py Normal file

@@ -0,0 +1,296 @@
"""Sync service for *arr instances"""
import httpx
import logging
from typing import Dict, List, Optional
from app.core.config import settings
from app.core.database import pool
import json
logger = logging.getLogger(__name__)
async def fetch_radarr_movies() -> List[Dict]:
"""Fetch all movies from Radarr"""
if not settings.radarr_api_key:
logger.warning("Radarr API key not configured")
return []
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{settings.radarr_url}/api/v3/movie",
headers={"X-Api-Key": settings.radarr_api_key},
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
logger.error(f"Failed to fetch Radarr movies: {e}")
return []
async def fetch_sonarr_series() -> List[Dict]:
"""Fetch all series from Sonarr"""
if not settings.sonarr_api_key:
logger.warning("Sonarr API key not configured")
return []
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{settings.sonarr_url}/api/v3/series",
headers={"X-Api-Key": settings.sonarr_api_key},
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
logger.error(f"Failed to fetch Sonarr series: {e}")
return []
async def fetch_lidarr_artists() -> List[Dict]:
"""Fetch all artists from Lidarr"""
if not settings.lidarr_api_key:
logger.warning("Lidarr API key not configured")
return []
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{settings.lidarr_url}/api/v1/artist",
headers={"X-Api-Key": settings.lidarr_api_key},
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
logger.error(f"Failed to fetch Lidarr artists: {e}")
return []
def extract_country_from_radarr(movie: Dict) -> Optional[str]:
"""Extract country code from Radarr movie metadata"""
# Try productionCountries first
if "productionCountries" in movie and movie["productionCountries"]:
countries = movie["productionCountries"]
if isinstance(countries, list) and len(countries) > 0:
country = countries[0]
if isinstance(country, dict) and "iso_3166_1" in country:
return country["iso_3166_1"].upper()
elif isinstance(country, str):
# Try to map country name to code (simplified)
return None # Would need a mapping table
# Try to get from TMDB metadata if available
if "tmdbId" in movie and movie.get("movieMetadata", {}).get("productionCountries"):
countries = movie["movieMetadata"]["productionCountries"]
if isinstance(countries, list) and len(countries) > 0:
country = countries[0]
if isinstance(country, dict) and "iso_3166_1" in country:
return country["iso_3166_1"].upper()
return None
def extract_country_from_sonarr(series: Dict) -> Optional[str]:
"""Extract country code from Sonarr series metadata"""
# Sonarr doesn't always have country info directly
# Check network origin or other metadata
if "network" in series and series["network"]:
# Network name might hint at country, but not reliable
pass
# Check if there's any country metadata
if "seriesMetadata" in series:
metadata = series["seriesMetadata"]
if "originCountry" in metadata and metadata["originCountry"]:
# originCountry might be a list or string
origin = metadata["originCountry"]
if isinstance(origin, list) and len(origin) > 0:
return origin[0].upper() if len(origin[0]) == 2 else None
elif isinstance(origin, str) and len(origin) == 2:
return origin.upper()
return None
def extract_country_from_lidarr(artist: Dict) -> Optional[str]:
"""Extract country code from Lidarr artist metadata"""
# Lidarr has a country field
if "country" in artist and artist["country"]:
country = artist["country"]
if isinstance(country, str) and len(country) == 2:
return country.upper()
# Might be a country name, would need mapping
return None
async def upsert_media_item(source_kind: str, source_item_id: int, title: str,
year: Optional[int], media_type: str, arr_raw: Dict):
"""Upsert a media item into the database"""
# Pool should be initialized on startup
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
# Upsert media item
query = """
INSERT INTO moviemap.media_item
(source_kind, source_item_id, title, year, media_type, arr_raw)
VALUES (%s, %s, %s, %s, %s, %s::jsonb)
ON CONFLICT (source_kind, source_item_id)
DO UPDATE SET
title = EXCLUDED.title,
year = EXCLUDED.year,
arr_raw = EXCLUDED.arr_raw
RETURNING id
"""
await cur.execute(
query,
(source_kind, source_item_id, title, year, media_type, json.dumps(arr_raw))
)
result = await cur.fetchone()
media_item_id = result[0]
# Extract and upsert country
country_code = None
if source_kind == "radarr":
country_code = extract_country_from_radarr(arr_raw)
elif source_kind == "sonarr":
country_code = extract_country_from_sonarr(arr_raw)
elif source_kind == "lidarr":
country_code = extract_country_from_lidarr(arr_raw)
# Delete existing country associations
await cur.execute(
"DELETE FROM moviemap.media_country WHERE media_item_id = %s",
(media_item_id,)
)
# Insert new country association if found
if country_code:
await cur.execute(
"INSERT INTO moviemap.media_country (media_item_id, country_code) VALUES (%s, %s)",
(media_item_id, country_code)
)
await conn.commit()
return media_item_id
async def sync_radarr():
"""Sync movies from Radarr"""
movies = await fetch_radarr_movies()
synced = 0
for movie in movies:
try:
await upsert_media_item(
source_kind="radarr",
source_item_id=movie.get("id"),
title=movie.get("title", "Unknown"),
year=movie.get("year"),
media_type="movie",
arr_raw=movie
)
synced += 1
except Exception as e:
logger.error(f"Failed to sync movie {movie.get('id')}: {e}")
return {"radarr": synced}
async def sync_sonarr():
"""Sync series from Sonarr"""
series = await fetch_sonarr_series()
synced = 0
for s in series:
try:
await upsert_media_item(
source_kind="sonarr",
source_item_id=s.get("id"),
title=s.get("title", "Unknown"),
year=s.get("year"),
media_type="show",
arr_raw=s
)
synced += 1
except Exception as e:
logger.error(f"Failed to sync series {s.get('id')}: {e}")
return {"sonarr": synced}
async def sync_lidarr():
"""Sync artists from Lidarr"""
artists = await fetch_lidarr_artists()
synced = 0
for artist in artists:
try:
await upsert_media_item(
source_kind="lidarr",
source_item_id=artist.get("id"),
title=artist.get("artistName", "Unknown"),
year=None, # Artists don't have a year
media_type="music",
arr_raw=artist
)
synced += 1
except Exception as e:
logger.error(f"Failed to sync artist {artist.get('id')}: {e}")
return {"lidarr": synced}
async def sync_all_arrs() -> Dict:
"""Sync from all *arr instances"""
logger.info("Starting sync from all *arr instances")
results = {}
# Sync each service
try:
results.update(await sync_radarr())
except Exception as e:
logger.error(f"Radarr sync failed: {e}")
results["radarr"] = 0
try:
results.update(await sync_sonarr())
except Exception as e:
logger.error(f"Sonarr sync failed: {e}")
results["sonarr"] = 0
try:
results.update(await sync_lidarr())
except Exception as e:
logger.error(f"Lidarr sync failed: {e}")
results["lidarr"] = 0
# Update last sync time (pool should be initialized)
if not pool:
from app.core.database import init_db
await init_db()
async with pool.connection() as conn:
async with conn.cursor() as cur:
for source_kind in ["radarr", "sonarr", "lidarr"]:
await cur.execute(
"""
INSERT INTO moviemap.source (kind, base_url, enabled, last_sync_at)
VALUES (%s, %s, %s, NOW())
ON CONFLICT (kind) DO UPDATE SET last_sync_at = NOW()
""",
(source_kind, getattr(settings, f"{source_kind}_url"), True)
)
await conn.commit()
logger.info(f"Sync completed: {results}")
return results
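
The Radarr extractor expects TMDB-style productionCountries entries; a small illustration against a fabricated payload:

from app.services.sync import extract_country_from_radarr

movie = {  # minimal made-up Radarr movie for illustration only
    "id": 1,
    "title": "Example",
    "productionCountries": [{"iso_3166_1": "fr", "name": "France"}],
}
assert extract_country_from_radarr(movie) == "FR"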

74
backend/main.py Normal file

@@ -0,0 +1,74 @@
"""
Movie Map Backend - FastAPI application
"""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from pathlib import Path
import logging
from contextlib import asynccontextmanager
from app.api import collection, watched, pins, admin
from app.core.config import settings
from app.core.database import init_db, close_db
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
"""Startup and shutdown events"""
# Startup
logger.info("Initializing database connection...")
await init_db()
yield
# Shutdown
logger.info("Closing database connection...")
await close_db()
app = FastAPI(title="Movie Map API", version="1.0.0", lifespan=lifespan)
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=["*"], # In production, restrict this
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# API routes
app.include_router(collection.router, prefix="/api/collection", tags=["collection"])
app.include_router(watched.router, prefix="/api/watched", tags=["watched"])
app.include_router(pins.router, prefix="/api/pins", tags=["pins"])
app.include_router(admin.router, prefix="/admin", tags=["admin"])
# Health check endpoint; registered before the static mount below, because a
# catch-all mount at "/" would otherwise shadow any route added after it
@app.get("/api/health")
async def health():
    """Health check endpoint"""
    return {"status": "ok"}
# Serve frontend static files
# Check multiple possible locations (dev, Nix build, etc.)
frontend_paths = [
    Path(__file__).parent.parent / "frontend" / "dist",  # Dev mode
    Path(__file__).parent / "frontend" / "dist",  # Nix build
    Path(__file__).parent / "frontend",  # Fallback
]
frontend_path = None
for path in frontend_paths:
    if path.exists():
        frontend_path = path
        break
if frontend_path:
    logger.info(f"Serving frontend from {frontend_path}")
    app.mount("/", StaticFiles(directory=str(frontend_path), html=True), name="static")
else:
    logger.warning("Frontend static files not found - API only mode")

11
backend/requirements.txt Normal file

@@ -0,0 +1,11 @@
fastapi==0.104.1
uvicorn[standard]==0.24.0
psycopg[binary]==3.1.18
psycopg-pool==3.2.0
alembic==1.12.1
sqlalchemy==2.0.23
httpx==0.25.2
pydantic==2.5.2
pydantic-settings==2.1.0
python-multipart==0.0.6

35
backend/run.sh Executable file

@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -euo pipefail
cd "$(dirname "$0")"
# Default values
export PORT=${PORT:-8080}
export POSTGRES_SOCKET_PATH=${POSTGRES_SOCKET_PATH:-/run/postgresql}
export POSTGRES_DB=${POSTGRES_DB:-jawz}
# Read secrets from files if *_FILE variables are set (for sops-nix integration)
for var in SONARR_API_KEY RADARR_API_KEY LIDARR_API_KEY MOVIEMAP_ADMIN_TOKEN; do
    file_var="${var}_FILE"
    if [ -n "${!file_var:-}" ] && [ -f "${!file_var}" ]; then
        export "$var"="$(cat "${!file_var}")"
    fi
done
# Run migrations if needed
if [ -d "alembic/versions" ]; then
alembic upgrade head
fi
# Start the server
exec uvicorn main:app --host 127.0.0.1 --port "$PORT"