Add new API endpoints for media retrieval by country and enhance configuration
- Introduced `/api/tmdb` and `/api/collection/missing-locations` endpoints to the backend for improved media management.
- Added a new `get_media_by_country` function in the collection API to fetch media items based on country codes.
- Updated configuration to allow overriding *arr base URLs via environment variables for better flexibility.
- Enhanced frontend with a new `MissingLocations` component and integrated it into the routing structure.
- Improved the `CollectionMap` component to handle country selection and display media items accordingly.
- Added testing dependencies in `requirements.txt` and updated frontend configuration for testing support.
backend/app/api/collection.py
@@ -1,7 +1,8 @@
"""Collection API endpoints"""
-from fastapi import APIRouter, Query
+from fastapi import APIRouter, Query, HTTPException
from typing import List, Optional
import json
from app.core.database import init_db, pool as db_pool

router = APIRouter()

@@ -61,3 +62,70 @@ async def get_collection_summary(

    return result


@router.get("/by-country")
async def get_media_by_country(
    country_code: str = Query(..., description="ISO 3166-1 alpha-2 country code"),
    types: Optional[str] = Query(None, description="Comma-separated list: movie,show,music")
):
    """
    Get list of media items for a specific country.
    Returns media items with their details.
    """
    await init_db()
    if db_pool is None:
        raise HTTPException(status_code=503, detail="Database not available")

    # Validate country code
    if len(country_code) != 2 or not country_code.isalpha():
        raise HTTPException(status_code=400, detail="Country code must be 2 letters (ISO 3166-1 alpha-2)")

    country_code = country_code.upper()

    # Parse types filter
    type_filter = []
    if types:
        type_filter = [t.strip() for t in types.split(",") if t.strip() in ["movie", "show", "music"]]

    async with db_pool.connection() as conn:
        async with conn.cursor() as cur:
            query = """
                SELECT
                    mi.id,
                    mi.source_kind,
                    mi.source_item_id,
                    mi.title,
                    mi.year,
                    mi.media_type
                FROM moviemap.media_country mc
                JOIN moviemap.media_item mi ON mc.media_item_id = mi.id
                WHERE mc.country_code = %s
            """
            params = [country_code]

            if type_filter:
                query += " AND mi.media_type = ANY(%s)"
                params.append(type_filter)

            query += " ORDER BY mi.title"

            await cur.execute(query, params)
            rows = await cur.fetchall()

            items = []
            for row in rows:
                items.append({
                    "id": str(row[0]),
                    "source_kind": row[1],
                    "source_item_id": row[2],
                    "title": row[3],
                    "year": row[4],
                    "media_type": row[5],
                })

    return {
        "country_code": country_code,
        "count": len(items),
        "items": items
    }
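For reference, a minimal client-side sketch of calling the new route (the collection router is mounted under `/api/collection` in the `main.py` hunk further down, so the full path is `/api/collection/by-country`; the base URL below is a placeholder):

import httpx

# Fetch movies and shows associated with Germany; the endpoint normalizes
# lowercase country codes to uppercase before querying.
resp = httpx.get(
    "http://127.0.0.1:8000/api/collection/by-country",  # placeholder host/port
    params={"country_code": "de", "types": "movie,show"},
)
resp.raise_for_status()
data = resp.json()
print(data["count"], [item["title"] for item in data["items"]])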
backend/app/api/missing_locations.py (new file, 87 lines)
@@ -0,0 +1,87 @@
"""Missing locations API endpoints"""
from fastapi import APIRouter, Query, HTTPException
from typing import Optional
from app.core.database import init_db, pool as db_pool

router = APIRouter()


@router.get("")
async def get_missing_locations(
    source_kind: Optional[str] = Query(None, description="Filter by source: radarr, sonarr, lidarr"),
    media_type: Optional[str] = Query(None, description="Filter by media type: movie, show, music"),
    limit: int = Query(100, ge=1, le=1000, description="Maximum number of items to return"),
    offset: int = Query(0, ge=0, description="Number of items to skip")
):
    """
    Get list of media items without country metadata.
    Returns paginated list of media items that don't have any country associations.
    """
    await init_db()
    if db_pool is None:
        raise HTTPException(status_code=503, detail="Database not available")

    async with db_pool.connection() as conn:
        async with conn.cursor() as cur:
            # Build query
            where_clauses = ["mc.media_item_id IS NULL"]
            params = []

            if source_kind:
                where_clauses.append("mi.source_kind = %s")
                params.append(source_kind)
            if media_type:
                where_clauses.append("mi.media_type = %s")
                params.append(media_type)

            where_clause = " AND ".join(where_clauses)

            # Get total count
            count_query = f"""
                SELECT COUNT(DISTINCT mi.id)
                FROM moviemap.media_item mi
                LEFT JOIN moviemap.media_country mc ON mi.id = mc.media_item_id
                WHERE {where_clause}
            """
            await cur.execute(count_query, params)
            total_count = (await cur.fetchone())[0]

            # Get items
            query = f"""
                SELECT
                    mi.id,
                    mi.source_kind,
                    mi.source_item_id,
                    mi.title,
                    mi.year,
                    mi.media_type
                FROM moviemap.media_item mi
                LEFT JOIN moviemap.media_country mc ON mi.id = mc.media_item_id
                WHERE {where_clause}
                ORDER BY mi.title
                LIMIT %s OFFSET %s
            """
            params.extend([limit, offset])

            await cur.execute(query, params)
            rows = await cur.fetchall()

            items = []
            for row in rows:
                items.append({
                    "id": str(row[0]),
                    "source_kind": row[1],
                    "source_item_id": row[2],
                    "title": row[3],
                    "year": row[4],
                    "media_type": row[5],
                })

    return {
        "total": total_count,
        "returned": len(items),
        "offset": offset,
        "limit": limit,
        "items": items
    }
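A similar sketch for the paginated missing-locations listing (mounted at `/api/collection/missing-locations`; the base URL is again a placeholder):

import httpx

# Page through Radarr items that still have no country association.
resp = httpx.get(
    "http://127.0.0.1:8000/api/collection/missing-locations",  # placeholder host/port
    params={"source_kind": "radarr", "limit": 50, "offset": 0},
)
resp.raise_for_status()
page = resp.json()
print(f"{page['returned']} of {page['total']} items lack a country")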
backend/app/api/tmdb.py (new file, 71 lines)
@@ -0,0 +1,71 @@
"""TMDB API endpoints for searching movies and TV shows"""
from fastapi import APIRouter, Query, HTTPException
from typing import Optional
import httpx
from app.core.config import settings

router = APIRouter()

TMDB_BASE_URL = "https://api.themoviedb.org/3"


@router.get("/search")
async def search_tmdb(
    query: str = Query(..., description="Search query"),
    type: str = Query("movie", description="Type: movie or tv")
):
    """
    Search TMDB for movies or TV shows.
    Returns a list of results with title, year, and other metadata.
    """
    if not settings.tmdb_api_key:
        raise HTTPException(status_code=503, detail="TMDB API key not configured")

    if type not in ["movie", "tv"]:
        raise HTTPException(status_code=400, detail="Type must be 'movie' or 'tv'")

    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{TMDB_BASE_URL}/search/{type}",
                params={
                    "api_key": settings.tmdb_api_key,
                    "query": query,
                    "language": "en-US",
                },
                timeout=10.0
            )
            response.raise_for_status()
            data = response.json()

            results = []
            for item in data.get("results", [])[:10]:  # Limit to 10 results
                result = {
                    "id": item.get("id"),
                    "title": item.get("title") or item.get("name"),
                    "year": None,
                    "type": type,
                    "overview": item.get("overview"),
                    "poster_path": item.get("poster_path"),
                }

                # Extract year from release_date or first_air_date
                date_str = item.get("release_date") or item.get("first_air_date")
                if date_str:
                    try:
                        result["year"] = int(date_str.split("-")[0])
                    except (ValueError, AttributeError):
                        pass

                results.append(result)

            return {
                "query": query,
                "type": type,
                "results": results
            }
    except httpx.HTTPStatusError as e:
        raise HTTPException(status_code=e.response.status_code, detail=f"TMDB API error: {e.response.text[:200]}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to search TMDB: {str(e)}")
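And a sketch for the TMDB proxy search (mounted at `/api/tmdb`, so the path is `/api/tmdb/search`); this only works once `settings.tmdb_api_key` is configured, and the base URL is a placeholder:

import httpx

# Search TMDB through the backend rather than calling TMDB directly.
resp = httpx.get(
    "http://127.0.0.1:8000/api/tmdb/search",  # placeholder host/port
    params={"query": "Parasite", "type": "movie"},
)
resp.raise_for_status()
for hit in resp.json()["results"]:
    print(hit["title"], hit["year"])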
backend/app/core/config.py
@@ -21,10 +21,10 @@ class Settings(BaseSettings):
    radarr_api_key: str = os.getenv("RADARR_API_KEY", "")
    lidarr_api_key: str = os.getenv("LIDARR_API_KEY", "")

-    # *arr base URLs
-    sonarr_url: str = "http://127.0.0.1:8989"
-    radarr_url: str = "http://127.0.0.1:7878"
-    lidarr_url: str = "http://127.0.0.1:8686"
+    # *arr base URLs (can be overridden via environment variables)
+    sonarr_url: str = os.getenv("SONARR_URL", "http://127.0.0.1:8989")
+    radarr_url: str = os.getenv("RADARR_URL", "http://127.0.0.1:7878")
+    lidarr_url: str = os.getenv("LIDARR_URL", "http://127.0.0.1:8686")

    # Admin
    admin_token: Optional[str] = os.getenv("MOVIEMAP_ADMIN_TOKEN")
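Note that these defaults are read with `os.getenv` when the settings module is imported, so an override has to be in the process environment before `app.core.config` is loaded (normally set in the service environment). A minimal sketch, with a placeholder URL:

import os

# Must be set before app.core.config is imported; in deployment this would
# come from the service environment, not from code.
os.environ["SONARR_URL"] = "http://sonarr.example.lan:8989"  # placeholder

from app.core.config import settings

assert settings.sonarr_url == "http://sonarr.example.lan:8989"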
backend/app/main.py
@@ -9,7 +9,7 @@ import os
import logging
from contextlib import asynccontextmanager

-from app.api import collection, watched, pins, admin
+from app.api import collection, watched, pins, admin, tmdb, missing_locations
from app.core.config import settings
from app.core.database import init_db, close_db

@@ -45,6 +45,8 @@ app.include_router(collection.router, prefix="/api/collection", tags=["collection"])
app.include_router(watched.router, prefix="/api/watched", tags=["watched"])
app.include_router(pins.router, prefix="/api/pins", tags=["pins"])
app.include_router(admin.router, prefix="/admin", tags=["admin"])
+app.include_router(tmdb.router, prefix="/api/tmdb", tags=["tmdb"])
+app.include_router(missing_locations.router, prefix="/api/collection/missing-locations", tags=["missing-locations"])

# Serve frontend static files
# Check multiple possible locations (dev, Nix build, etc.)
requirements.txt
@@ -8,4 +8,7 @@ httpx==0.25.2
pydantic==2.5.2
pydantic-settings==2.1.0
python-multipart==0.0.6
+pytest==7.4.3
+pytest-asyncio==0.21.1
+pytest-cov==4.1.0

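With these pinned test dependencies, the new suite under `backend/tests/` can be driven from Python as well as the CLI; a sketch, assuming a local PostgreSQL reachable through the socket path and database name that `conftest.py` reads from the `TEST_POSTGRES_*` environment variables:

import pytest

# Equivalent to running `pytest tests --cov=app` from the backend/ directory;
# pytest-asyncio handles the async tests and pytest-cov reports coverage.
raise SystemExit(pytest.main(["tests", "--cov=app", "--cov-report=term-missing"]))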
backend/tests/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
"""Test suite for Movie Map backend"""

backend/tests/conftest.py (new file, 200 lines)
@@ -0,0 +1,200 @@
"""Pytest configuration and fixtures"""
import pytest
import asyncio
import os
from typing import AsyncGenerator
from fastapi.testclient import TestClient
from httpx import AsyncClient
from app.core.database import init_db, close_db, pool as db_pool
from app.core.config import settings
from app.main import app
import psycopg
from psycopg_pool import AsyncConnectionPool


# Use test database
TEST_DB = os.getenv("TEST_POSTGRES_DB", "moviemap_test")
TEST_USER = os.getenv("TEST_POSTGRES_USER", os.getenv("USER", "jawz"))
TEST_SOCKET = os.getenv("TEST_POSTGRES_SOCKET_PATH", "/run/postgresql")


@pytest.fixture(scope="session")
def event_loop():
    """Create an instance of the default event loop for the test session."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope="function")
async def test_db_pool() -> AsyncGenerator[AsyncConnectionPool, None]:
    """Create a test database connection pool"""
    test_db_url = f"postgresql://{TEST_USER}@/{TEST_DB}?host={TEST_SOCKET}"

    pool = AsyncConnectionPool(
        conninfo=test_db_url,
        min_size=1,
        max_size=5,
        open=False,
    )
    await pool.open()

    # Run migrations
    async with pool.connection() as conn:
        async with conn.cursor() as cur:
            # Create schema if not exists
            await cur.execute("CREATE SCHEMA IF NOT EXISTS moviemap")

            # Create enums
            await cur.execute("""
                DO $$ BEGIN
                    CREATE TYPE moviemap.source_kind AS ENUM ('radarr', 'sonarr', 'lidarr');
                EXCEPTION
                    WHEN duplicate_object THEN null;
                END $$;
            """)

            await cur.execute("""
                DO $$ BEGIN
                    CREATE TYPE moviemap.media_type AS ENUM ('movie', 'show', 'music');
                EXCEPTION
                    WHEN duplicate_object THEN null;
                END $$;
            """)

            await cur.execute("""
                DO $$ BEGIN
                    CREATE TYPE moviemap.watched_media_type AS ENUM ('movie', 'show');
                EXCEPTION
                    WHEN duplicate_object THEN null;
                END $$;
            """)

            # Create tables
            await cur.execute("""
                CREATE TABLE IF NOT EXISTS moviemap.source (
                    id SERIAL PRIMARY KEY,
                    kind moviemap.source_kind NOT NULL UNIQUE,
                    base_url TEXT NOT NULL,
                    enabled BOOLEAN NOT NULL DEFAULT true,
                    last_sync_at TIMESTAMPTZ
                )
            """)

            await cur.execute("""
                CREATE TABLE IF NOT EXISTS moviemap.media_item (
                    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                    source_kind moviemap.source_kind NOT NULL,
                    source_item_id INTEGER NOT NULL,
                    title TEXT NOT NULL,
                    year INTEGER,
                    media_type moviemap.media_type NOT NULL,
                    arr_raw JSONB,
                    UNIQUE (source_kind, source_item_id)
                )
            """)

            await cur.execute("""
                CREATE TABLE IF NOT EXISTS moviemap.media_country (
                    media_item_id UUID NOT NULL REFERENCES moviemap.media_item(id) ON DELETE CASCADE,
                    country_code CHAR(2) NOT NULL,
                    weight SMALLINT NOT NULL DEFAULT 1,
                    PRIMARY KEY (media_item_id, country_code)
                )
            """)

            await cur.execute("""
                CREATE TABLE IF NOT EXISTS moviemap.watched_item (
                    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                    media_type moviemap.watched_media_type NOT NULL,
                    title TEXT NOT NULL,
                    year INTEGER,
                    country_code CHAR(2) NOT NULL,
                    watched_at TIMESTAMPTZ,
                    notes TEXT,
                    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
                )
            """)

            await cur.execute("""
                CREATE TABLE IF NOT EXISTS moviemap.manual_pin (
                    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                    country_code CHAR(2) NOT NULL,
                    label TEXT,
                    pinned_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
                )
            """)

            await conn.commit()

    yield pool

    # Cleanup: drop all data but keep schema
    async with pool.connection() as conn:
        async with conn.cursor() as cur:
            await cur.execute("TRUNCATE TABLE moviemap.media_country CASCADE")
            await cur.execute("TRUNCATE TABLE moviemap.media_item CASCADE")
            await cur.execute("TRUNCATE TABLE moviemap.watched_item CASCADE")
            await cur.execute("TRUNCATE TABLE moviemap.manual_pin CASCADE")
            await cur.execute("TRUNCATE TABLE moviemap.source CASCADE")
            await conn.commit()

    await pool.close()


@pytest.fixture(scope="function")
async def test_client(test_db_pool) -> AsyncGenerator[AsyncClient, None]:
    """Create a test client with test database"""
    # Temporarily replace the global pool
    import app.core.database
    original_pool = app.core.database.pool
    app.core.database.pool = test_db_pool

    async with AsyncClient(app=app, base_url="http://test") as client:
        yield client

    # Restore original pool
    app.core.database.pool = original_pool


@pytest.fixture
def mock_radarr_response():
    """Mock Radarr API response"""
    return [
        {
            "id": 1,
            "title": "Test Movie",
            "year": 2020,
            "tmdbId": 12345,
            "productionCountries": [{"iso_3166_1": "US"}]
        }
    ]


@pytest.fixture
def mock_sonarr_response():
    """Mock Sonarr API response"""
    return [
        {
            "id": 1,
            "title": "Test Show",
            "year": 2020,
            "tmdbId": 67890,
            "seriesMetadata": {"originCountry": ["US"]}
        }
    ]


@pytest.fixture
def mock_lidarr_response():
    """Mock Lidarr API response"""
    return [
        {
            "id": 1,
            "artistName": "Test Artist",
            "country": "US",
            "foreignArtistId": "test-mbid-123"
        }
    ]
backend/tests/test_api.py (new file, 261 lines)
@@ -0,0 +1,261 @@
"""Tests for API endpoints"""
import pytest
from uuid import uuid4
import json


@pytest.mark.asyncio
async def test_health_endpoint(test_client):
    """Test health check endpoint"""
    response = await test_client.get("/api/health")
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}


@pytest.mark.asyncio
async def test_collection_summary_empty(test_client, test_db_pool):
    """Test collection summary with no data"""
    response = await test_client.get("/api/collection/summary")
    assert response.status_code == 200
    assert response.json() == {}


@pytest.mark.asyncio
async def test_collection_summary_with_data(test_client, test_db_pool):
    """Test collection summary with media items"""
    # Insert test data
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            # Insert media item
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('radarr', 1, 'Test Movie', 2020, 'movie', '{}'::jsonb)
                RETURNING id
            """)
            media_id = (await cur.fetchone())[0]

            # Insert country association
            await cur.execute("""
                INSERT INTO moviemap.media_country (media_item_id, country_code)
                VALUES (%s, 'US')
            """, (media_id,))

            await conn.commit()

    response = await test_client.get("/api/collection/summary")
    assert response.status_code == 200
    data = response.json()
    assert "US" in data
    assert data["US"]["movie"] == 1


@pytest.mark.asyncio
async def test_collection_summary_filtered(test_client, test_db_pool):
    """Test collection summary with type filters"""
    # Insert test data
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            # Insert movie
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('radarr', 1, 'Test Movie', 2020, 'movie', '{}'::jsonb)
                RETURNING id
            """)
            movie_id = (await cur.fetchone())[0]
            await cur.execute("""
                INSERT INTO moviemap.media_country (media_item_id, country_code)
                VALUES (%s, 'US')
            """, (movie_id,))

            # Insert show
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('sonarr', 1, 'Test Show', 2020, 'show', '{}'::jsonb)
                RETURNING id
            """)
            show_id = (await cur.fetchone())[0]
            await cur.execute("""
                INSERT INTO moviemap.media_country (media_item_id, country_code)
                VALUES (%s, 'US')
            """, (show_id,))

            await conn.commit()

    # Test with movie filter
    response = await test_client.get("/api/collection/summary?types=movie")
    assert response.status_code == 200
    data = response.json()
    assert "US" in data
    assert data["US"]["movie"] == 1
    assert "show" not in data["US"]


@pytest.mark.asyncio
async def test_watched_list_empty(test_client, test_db_pool):
    """Test watched items list with no data"""
    response = await test_client.get("/api/watched")
    assert response.status_code == 200
    assert response.json() == []


@pytest.mark.asyncio
async def test_watched_create(test_client, test_db_pool):
    """Test creating a watched item"""
    response = await test_client.post(
        "/api/watched",
        json={
            "media_type": "movie",
            "title": "Test Movie",
            "year": 2020,
            "country_code": "US",
            "notes": "Test notes"
        }
    )
    assert response.status_code == 200
    data = response.json()
    assert "id" in data
    assert data["status"] == "created"


@pytest.mark.asyncio
async def test_watched_list_with_data(test_client, test_db_pool):
    """Test watched items list with data"""
    # Create watched item
    create_response = await test_client.post(
        "/api/watched",
        json={
            "media_type": "movie",
            "title": "Test Movie",
            "country_code": "US"
        }
    )
    assert create_response.status_code == 200

    # List watched items
    response = await test_client.get("/api/watched")
    assert response.status_code == 200
    data = response.json()
    assert len(data) == 1
    assert data[0]["title"] == "Test Movie"
    assert data[0]["country_code"] == "US"


@pytest.mark.asyncio
async def test_watched_delete(test_client, test_db_pool):
    """Test deleting a watched item"""
    # Create watched item
    create_response = await test_client.post(
        "/api/watched",
        json={
            "media_type": "movie",
            "title": "Test Movie",
            "country_code": "US"
        }
    )
    item_id = create_response.json()["id"]

    # Delete it
    response = await test_client.delete(f"/api/watched/{item_id}")
    assert response.status_code == 200
    assert response.json()["status"] == "deleted"

    # Verify it's gone
    list_response = await test_client.get("/api/watched")
    assert len(list_response.json()) == 0


@pytest.mark.asyncio
async def test_watched_summary(test_client, test_db_pool):
    """Test watched items summary"""
    # Create watched items
    await test_client.post(
        "/api/watched",
        json={
            "media_type": "movie",
            "title": "Test Movie",
            "country_code": "US",
            "watched_at": "2024-01-01T00:00:00Z"
        }
    )
    await test_client.post(
        "/api/watched",
        json={
            "media_type": "show",
            "title": "Test Show",
            "country_code": "US",
            "watched_at": "2024-01-01T00:00:00Z"
        }
    )

    response = await test_client.get("/api/watched/summary")
    assert response.status_code == 200
    data = response.json()
    assert "US" in data
    assert data["US"]["movie"] == 1
    assert data["US"]["show"] == 1


@pytest.mark.asyncio
async def test_pins_list_empty(test_client, test_db_pool):
    """Test pins list with no data"""
    response = await test_client.get("/api/pins")
    assert response.status_code == 200
    assert response.json() == []


@pytest.mark.asyncio
async def test_pins_create(test_client, test_db_pool):
    """Test creating a pin"""
    response = await test_client.post(
        "/api/pins",
        json={
            "country_code": "US",
            "label": "Test Pin"
        }
    )
    assert response.status_code == 200
    data = response.json()
    assert "id" in data
    assert data["status"] == "created"


@pytest.mark.asyncio
async def test_pins_delete(test_client, test_db_pool):
    """Test deleting a pin"""
    # Create pin
    create_response = await test_client.post(
        "/api/pins",
        json={"country_code": "US"}
    )
    pin_id = create_response.json()["id"]

    # Delete it
    response = await test_client.delete(f"/api/pins/{pin_id}")
    assert response.status_code == 200
    assert response.json()["status"] == "deleted"


@pytest.mark.asyncio
async def test_admin_sync_no_auth(test_client, test_db_pool):
    """Test admin sync without auth (should work if no token configured)"""
    # This will fail if admin token is required, but should work if not
    # We'll mock the sync function to avoid actual API calls
    response = await test_client.post("/admin/sync")
    # Either 200 (no auth required) or 401 (auth required)
    assert response.status_code in [200, 401, 500]  # 500 if sync fails due to missing *arr


@pytest.mark.asyncio
async def test_missing_countries_empty(test_client, test_db_pool):
    """Test missing countries endpoint with no data"""
    response = await test_client.get("/admin/missing-countries")
    # May require auth, but if not, should return empty
    if response.status_code == 200:
        data = response.json()
        assert data["total"] == 0
        assert data["returned"] == 0
        assert data["items"] == []
backend/tests/test_database.py (new file, 153 lines)
@@ -0,0 +1,153 @@
"""Tests for database operations"""
import pytest
import json
from uuid import uuid4


@pytest.mark.asyncio
async def test_media_item_insert(test_db_pool):
    """Test inserting a media item"""
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('radarr', 1, 'Test Movie', 2020, 'movie', %s::jsonb)
                RETURNING id, title
            """, (json.dumps({"test": "data"}),))

            result = await cur.fetchone()
            assert result is not None
            assert result[1] == "Test Movie"

            await conn.commit()


@pytest.mark.asyncio
async def test_media_country_association(test_db_pool):
    """Test associating a country with a media item"""
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            # Insert media item
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('radarr', 1, 'Test Movie', 2020, 'movie', '{}'::jsonb)
                RETURNING id
            """)
            media_id = (await cur.fetchone())[0]

            # Associate country
            await cur.execute("""
                INSERT INTO moviemap.media_country (media_item_id, country_code)
                VALUES (%s, 'US')
            """, (media_id,))

            # Verify association
            await cur.execute("""
                SELECT country_code FROM moviemap.media_country
                WHERE media_item_id = %s
            """, (media_id,))

            result = await cur.fetchone()
            assert result is not None
            assert result[0] == "US"

            await conn.commit()


@pytest.mark.asyncio
async def test_media_item_unique_constraint(test_db_pool):
    """Test that source_kind + source_item_id is unique"""
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            # Insert first item
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('radarr', 1, 'Test Movie', 2020, 'movie', '{}'::jsonb)
            """)

            # Try to insert duplicate
            with pytest.raises(Exception):  # Should raise unique constraint violation
                await cur.execute("""
                    INSERT INTO moviemap.media_item
                    (source_kind, source_item_id, title, year, media_type, arr_raw)
                    VALUES ('radarr', 1, 'Another Movie', 2021, 'movie', '{}'::jsonb)
                """)

            await conn.rollback()


@pytest.mark.asyncio
async def test_watched_item_insert(test_db_pool):
    """Test inserting a watched item"""
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            await cur.execute("""
                INSERT INTO moviemap.watched_item
                (media_type, title, year, country_code, notes)
                VALUES ('movie', 'Test Movie', 2020, 'US', 'Test notes')
                RETURNING id, title
            """)

            result = await cur.fetchone()
            assert result is not None
            assert result[1] == "Test Movie"

            await conn.commit()


@pytest.mark.asyncio
async def test_manual_pin_insert(test_db_pool):
    """Test inserting a manual pin"""
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            await cur.execute("""
                INSERT INTO moviemap.manual_pin (country_code, label)
                VALUES ('US', 'Test Pin')
                RETURNING id, country_code
            """)

            result = await cur.fetchone()
            assert result is not None
            assert result[1] == "US"

            await conn.commit()


@pytest.mark.asyncio
async def test_cascade_delete(test_db_pool):
    """Test that deleting a media item cascades to country associations"""
    async with test_db_pool.connection() as conn:
        async with conn.cursor() as cur:
            # Insert media item with country
            await cur.execute("""
                INSERT INTO moviemap.media_item
                (source_kind, source_item_id, title, year, media_type, arr_raw)
                VALUES ('radarr', 1, 'Test Movie', 2020, 'movie', '{}'::jsonb)
                RETURNING id
            """)
            media_id = (await cur.fetchone())[0]

            await cur.execute("""
                INSERT INTO moviemap.media_country (media_item_id, country_code)
                VALUES (%s, 'US')
            """, (media_id,))

            # Delete media item
            await cur.execute("""
                DELETE FROM moviemap.media_item WHERE id = %s
            """, (media_id,))

            # Verify country association is also deleted
            await cur.execute("""
                SELECT COUNT(*) FROM moviemap.media_country
                WHERE media_item_id = %s
            """, (media_id,))

            count = (await cur.fetchone())[0]
            assert count == 0

            await conn.commit()
backend/tests/test_sync.py (new file, 204 lines)
@@ -0,0 +1,204 @@
"""Tests for sync service"""
import pytest
from unittest.mock import AsyncMock, patch
from app.services.sync import (
    extract_country_from_radarr,
    extract_country_from_sonarr,
    extract_country_from_lidarr,
    upsert_media_item,
)


def test_extract_country_from_radarr_with_production_countries():
    """Test extracting country from Radarr movie with productionCountries"""
    movie = {
        "id": 1,
        "title": "Test Movie",
        "productionCountries": [{"iso_3166_1": "US"}]
    }
    country = extract_country_from_radarr(movie)
    assert country == "US"


def test_extract_country_from_radarr_with_metadata():
    """Test extracting country from Radarr movie with movieMetadata"""
    movie = {
        "id": 1,
        "title": "Test Movie",
        "movieMetadata": {
            "productionCountries": [{"iso_3166_1": "GB"}]
        }
    }
    country = extract_country_from_radarr(movie)
    assert country == "GB"


def test_extract_country_from_radarr_no_country():
    """Test extracting country from Radarr movie with no country"""
    movie = {
        "id": 1,
        "title": "Test Movie"
    }
    country = extract_country_from_radarr(movie)
    assert country is None


def test_extract_country_from_sonarr_with_metadata():
    """Test extracting country from Sonarr series with seriesMetadata"""
    series = {
        "id": 1,
        "title": "Test Show",
        "seriesMetadata": {
            "originCountry": ["US"]
        }
    }
    country = extract_country_from_sonarr(series)
    assert country == "US"


def test_extract_country_from_sonarr_string_country():
    """Test extracting country from Sonarr series with string country"""
    series = {
        "id": 1,
        "title": "Test Show",
        "seriesMetadata": {
            "originCountry": "US"
        }
    }
    country = extract_country_from_sonarr(series)
    assert country == "US"


def test_extract_country_from_lidarr_with_country():
    """Test extracting country from Lidarr artist with country field"""
    artist = {
        "id": 1,
        "artistName": "Test Artist",
        "country": "US"
    }
    country = extract_country_from_lidarr(artist)
    assert country == "US"


def test_extract_country_from_lidarr_no_country():
    """Test extracting country from Lidarr artist with no country"""
    artist = {
        "id": 1,
        "artistName": "Test Artist"
    }
    country = extract_country_from_lidarr(artist)
    assert country is None


@pytest.mark.asyncio
async def test_upsert_media_item_new(test_db_pool):
    """Test upserting a new media item"""
    # Temporarily replace the global pool
    import app.core.database
    original_pool = app.core.database.pool
    app.core.database.pool = test_db_pool

    try:
        media_id = await upsert_media_item(
            source_kind="radarr",
            source_item_id=1,
            title="Test Movie",
            year=2020,
            media_type="movie",
            arr_raw={"id": 1, "title": "Test Movie", "productionCountries": [{"iso_3166_1": "US"}]}
        )

        assert media_id is not None

        # Verify it was inserted
        async with test_db_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute("""
                    SELECT title, year, media_type FROM moviemap.media_item
                    WHERE id = %s
                """, (media_id,))
                result = await cur.fetchone()
                assert result is not None
                assert result[0] == "Test Movie"
                assert result[1] == 2020
                assert result[2] == "movie"
    finally:
        app.core.database.pool = original_pool


@pytest.mark.asyncio
async def test_upsert_media_item_with_country(test_db_pool):
    """Test upserting a media item with country association"""
    # Temporarily replace the global pool
    import app.core.database
    original_pool = app.core.database.pool
    app.core.database.pool = test_db_pool

    try:
        media_id = await upsert_media_item(
            source_kind="radarr",
            source_item_id=1,
            title="Test Movie",
            year=2020,
            media_type="movie",
            arr_raw={"id": 1, "title": "Test Movie", "productionCountries": [{"iso_3166_1": "US"}]}
        )

        # Verify country was associated
        async with test_db_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute("""
                    SELECT country_code FROM moviemap.media_country
                    WHERE media_item_id = %s
                """, (media_id,))
                result = await cur.fetchone()
                assert result is not None
                assert result[0] == "US"
    finally:
        app.core.database.pool = original_pool


@pytest.mark.asyncio
async def test_upsert_media_item_update(test_db_pool):
    """Test updating an existing media item"""
    # Temporarily replace the global pool
    import app.core.database
    original_pool = app.core.database.pool
    app.core.database.pool = test_db_pool

    try:
        # Insert first
        media_id = await upsert_media_item(
            source_kind="radarr",
            source_item_id=1,
            title="Test Movie",
            year=2020,
            media_type="movie",
            arr_raw={"id": 1, "title": "Test Movie"}
        )

        # Update with new title
        updated_id = await upsert_media_item(
            source_kind="radarr",
            source_item_id=1,
            title="Updated Movie",
            year=2021,
            media_type="movie",
            arr_raw={"id": 1, "title": "Updated Movie"}
        )

        assert updated_id == media_id  # Same ID

        # Verify it was updated
        async with test_db_pool.connection() as conn:
            async with conn.cursor() as cur:
                await cur.execute("""
                    SELECT title, year FROM moviemap.media_item
                    WHERE id = %s
                """, (media_id,))
                result = await cur.fetchone()
                assert result[0] == "Updated Movie"
                assert result[1] == 2021
    finally:
        app.core.database.pool = original_pool