From 4337a8847cd2dc8beb63dc173129859f5d1033e7 Mon Sep 17 00:00:00 2001 From: Danilo Reyes Date: Sat, 7 Feb 2026 06:15:34 -0600 Subject: [PATCH] wowaweewa --- .gitignore | 19 ++ README.md | 17 ++ backend/Cargo.toml | 18 ++ backend/rustfmt.toml | 1 + backend/src/api/mod.rs | 2 + backend/src/api/untagged.rs | 145 ++++++++++++ backend/src/api/untagged_delete.rs | 153 +++++++++++++ backend/src/config.rs | 126 +++++++++++ backend/src/error.rs | 21 ++ backend/src/main.rs | 50 +++++ backend/src/services/audit_log.rs | 62 ++++++ backend/src/services/collage_sampler.rs | 83 +++++++ backend/src/services/list_file.rs | 85 +++++++ backend/src/services/mod.rs | 10 + backend/src/services/ops.rs | 130 +++++++++++ backend/src/services/ops_lock.rs | 18 ++ backend/src/services/path_guard.rs | 48 ++++ backend/src/services/preview_action.rs | 83 +++++++ backend/src/services/read_only.rs | 27 +++ backend/src/services/state_store.rs | 23 ++ backend/src/services/untagged_queue.rs | 97 ++++++++ backend/src/state.rs | 42 ++++ frontend/.prettierrc | 4 + frontend/package.json | 21 ++ .../src/components/list-file-matches.svelte | 55 +++++ .../src/components/untagged-controls.svelte | 41 ++++ frontend/src/pages/untagged-collage.svelte | 210 ++++++++++++++++++ frontend/src/services/untagged_api.ts | 70 ++++++ specs/001-archive-curator/tasks.md | 76 +++---- 29 files changed, 1699 insertions(+), 38 deletions(-) create mode 100644 README.md create mode 100644 backend/Cargo.toml create mode 100644 backend/rustfmt.toml create mode 100644 backend/src/api/mod.rs create mode 100644 backend/src/api/untagged.rs create mode 100644 backend/src/api/untagged_delete.rs create mode 100644 backend/src/config.rs create mode 100644 backend/src/error.rs create mode 100644 backend/src/main.rs create mode 100644 backend/src/services/audit_log.rs create mode 100644 backend/src/services/collage_sampler.rs create mode 100644 backend/src/services/list_file.rs create mode 100644 backend/src/services/mod.rs create mode 
100644 backend/src/services/ops.rs create mode 100644 backend/src/services/ops_lock.rs create mode 100644 backend/src/services/path_guard.rs create mode 100644 backend/src/services/preview_action.rs create mode 100644 backend/src/services/read_only.rs create mode 100644 backend/src/services/state_store.rs create mode 100644 backend/src/services/untagged_queue.rs create mode 100644 backend/src/state.rs create mode 100644 frontend/.prettierrc create mode 100644 frontend/package.json create mode 100644 frontend/src/components/list-file-matches.svelte create mode 100644 frontend/src/components/untagged-controls.svelte create mode 100644 frontend/src/pages/untagged-collage.svelte create mode 100644 frontend/src/services/untagged_api.ts diff --git a/.gitignore b/.gitignore index 718560b..c2d0b5f 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,22 @@ .DS_Store Thumbs.db *~ +.vscode/ +.idea/ +*.tmp +*.swp + +# Rust build/output +target/ +debug/ +release/ +*.rs.bk +*.rlib +*.prof* + +# Node/Svelte build/output +node_modules/ +dist/ +build/ +*.log +.env* diff --git a/README.md b/README.md new file mode 100644 index 0000000..ac0bb66 --- /dev/null +++ b/README.md @@ -0,0 +1,17 @@ +# Archive Curator + +## Local Run Notes + +- Backend: Rust (Axum) service in `backend/` +- Frontend: Svelte-based UI in `frontend/` + +### Planned Commands + +- Backend tests: `cargo test` +- Backend lint: `cargo clippy` +- Frontend scripts: `npm run dev` / `npm run build` + +### Safety Defaults + +This project is designed for local-only operation with strict safety gates: +read-only mode, preview/confirm workflows, and append-only audit logging. 
diff --git a/backend/Cargo.toml b/backend/Cargo.toml new file mode 100644 index 0000000..15ee8bf --- /dev/null +++ b/backend/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "archive-curator-backend" +version = "0.1.0" +edition = "2021" + +[dependencies] +axum = "0.7" +chrono = { version = "0.4", features = ["serde"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +sqlx = { version = "0.7", features = ["runtime-tokio", "sqlite", "macros"] } +tokio = { version = "1", features = ["macros", "rt-multi-thread"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] } +thiserror = "1" +anyhow = "1" +uuid = { version = "1", features = ["v4", "serde"] } +rand = "0.8" diff --git a/backend/rustfmt.toml b/backend/rustfmt.toml new file mode 100644 index 0000000..3a26366 --- /dev/null +++ b/backend/rustfmt.toml @@ -0,0 +1 @@ +edition = "2021" diff --git a/backend/src/api/mod.rs b/backend/src/api/mod.rs new file mode 100644 index 0000000..ffe798b --- /dev/null +++ b/backend/src/api/mod.rs @@ -0,0 +1,2 @@ +pub mod untagged; +pub mod untagged_delete; diff --git a/backend/src/api/untagged.rs b/backend/src/api/untagged.rs new file mode 100644 index 0000000..c3fbb48 --- /dev/null +++ b/backend/src/api/untagged.rs @@ -0,0 +1,145 @@ +use std::fs; +use std::path::Path; + +use axum::{ + extract::{Path as AxumPath, State}, + http::StatusCode, + response::IntoResponse, + routing::{get, post}, + Json, Router, +}; +use serde::{Deserialize, Serialize}; + +use crate::services::collage_sampler::MediaItem; +use crate::state::AppState; + +#[derive(Debug, Serialize, Deserialize)] +pub struct UntaggedCollage { + pub directory_id: String, + pub directory_name: String, + pub total_size_bytes: u64, + pub file_count: u64, + pub samples: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DecisionResult { + pub outcome: String, + pub audit_entry_id: String, +} + +pub fn router(state: AppState) -> Router { + Router::new() + 
.route("/directories/untagged/next", get(next_untagged)) + .route( + "/directories/untagged/:directory_id/resample", + post(resample_collage), + ) + .route( + "/directories/untagged/:directory_id/keep", + post(keep_directory), + ) + .with_state(state) +} + +async fn next_untagged(State(state): State) -> Result, StatusCode> { + let directory = state + .untagged_queue + .next_directory() + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? + .ok_or(StatusCode::NOT_FOUND)?; + + let samples = state + .collage_sampler + .sample(&directory.id, &directory.absolute_path, 12) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + Ok(Json(UntaggedCollage { + directory_id: directory.id, + directory_name: directory.name, + total_size_bytes: directory.total_size_bytes, + file_count: directory.file_count, + samples, + })) +} + +async fn resample_collage( + State(state): State, + AxumPath(directory_id): AxumPath, +) -> Result, StatusCode> { + let directory_path = state + .untagged_queue + .resolve_directory(&directory_id) + .map_err(|_| StatusCode::BAD_REQUEST)?; + let directory_name = directory_path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or_default() + .to_string(); + let (total_size_bytes, file_count) = dir_stats(&directory_path) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + let samples = state + .collage_sampler + .sample(&directory_id, &directory_path, 12) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + Ok(Json(UntaggedCollage { + directory_id, + directory_name, + total_size_bytes, + file_count, + samples, + })) +} + +async fn keep_directory( + State(state): State, + AxumPath(directory_id): AxumPath, +) -> Result { + state + .read_only + .ensure_writable() + .map_err(|_| StatusCode::CONFLICT)?; + let directory_path = state + .untagged_queue + .resolve_directory(&directory_id) + .map_err(|_| StatusCode::BAD_REQUEST)?; + let destination = state + .ops + .keep_directory(&directory_path) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let 
entry = state + .audit_log + .append_mutation( + "keep_directory", + vec![directory_path.display().to_string(), destination.display().to_string()], + Vec::new(), + "ok", + None, + ) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + Ok(Json(DecisionResult { + outcome: "kept".to_string(), + audit_entry_id: entry.id.to_string(), + })) +} + +fn dir_stats(path: &Path) -> std::io::Result<(u64, u64)> { + let mut total_size = 0u64; + let mut file_count = 0u64; + for entry in fs::read_dir(path)? { + let entry = entry?; + let meta = entry.metadata()?; + if meta.is_dir() { + let (size, count) = dir_stats(&entry.path())?; + total_size += size; + file_count += count; + } else if meta.is_file() { + total_size += meta.len(); + file_count += 1; + } + } + Ok((total_size, file_count)) +} diff --git a/backend/src/api/untagged_delete.rs b/backend/src/api/untagged_delete.rs new file mode 100644 index 0000000..f0f7bce --- /dev/null +++ b/backend/src/api/untagged_delete.rs @@ -0,0 +1,153 @@ +use axum::{ + extract::{Path as AxumPath, State}, + http::StatusCode, + routing::post, + Json, Router, +}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::services::list_file::{apply_removals_atomic, load_entries, match_entries, preview_removals}; +use crate::services::preview_action::{PreviewAction, PreviewActionType}; +use crate::state::AppState; + +#[derive(Debug, Serialize, Deserialize)] +pub struct DeletePreview { + pub preview_id: String, + pub target_paths: Vec, + pub list_file_changes_preview: Vec, + pub can_proceed: bool, + pub read_only_mode: bool, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DeleteConfirm { + pub preview_id: String, + pub remove_from_list_file: bool, + #[serde(default)] + pub selected_matches: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct DecisionResult { + pub outcome: String, + pub audit_entry_id: String, +} + +pub fn router(state: AppState) -> Router { + Router::new() + .route( + 
"/directories/untagged/:directory_id/preview-delete", + post(preview_delete), + ) + .route( + "/directories/untagged/:directory_id/confirm-delete", + post(confirm_delete), + ) + .with_state(state) +} + +async fn preview_delete( + State(state): State, + AxumPath(directory_id): AxumPath, +) -> Result, StatusCode> { + state + .read_only + .ensure_writable() + .map_err(|_| StatusCode::CONFLICT)?; + + let directory_path = state + .untagged_queue + .resolve_directory(&directory_id) + .map_err(|_| StatusCode::BAD_REQUEST)?; + let directory_name = directory_path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or_default() + .to_string(); + + let mut entries = load_entries(&state.config.download_list_path) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let _ = match_entries(&mut entries, &[directory_name]); + let (_remaining, removed) = preview_removals(&entries); + + let action = PreviewAction::new( + PreviewActionType::DirectoryDelete, + vec![directory_path.display().to_string()], + removed.clone(), + ); + let action = state.preview_store.create(action); + + Ok(Json(DeletePreview { + preview_id: action.id.to_string(), + target_paths: action.target_paths, + list_file_changes_preview: action.list_file_changes_preview, + can_proceed: true, + read_only_mode: false, + })) +} + +async fn confirm_delete( + State(state): State, + AxumPath(directory_id): AxumPath, + Json(payload): Json, +) -> Result, StatusCode> { + state + .read_only + .ensure_writable() + .map_err(|_| StatusCode::CONFLICT)?; + + let preview_id = Uuid::parse_str(&payload.preview_id) + .map_err(|_| StatusCode::BAD_REQUEST)?; + let _preview = state + .preview_store + .confirm(preview_id) + .map_err(|_| StatusCode::BAD_REQUEST)?; + + let directory_path = state + .untagged_queue + .resolve_directory(&directory_id) + .map_err(|_| StatusCode::BAD_REQUEST)?; + + let mut list_file_changes = Vec::new(); + if payload.remove_from_list_file { + let selected = payload + .selected_matches + .unwrap_or_else(|| { + 
directory_path + .file_name() + .and_then(|n| n.to_str()) + .map(|s| vec![s.to_string()]) + .unwrap_or_default() + }); + let mut entries = load_entries(&state.config.download_list_path) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + let _ = match_entries(&mut entries, &selected); + let (remaining, removed) = preview_removals(&entries); + apply_removals_atomic(&state.config.download_list_path, &remaining) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + list_file_changes = removed; + } + + let staged = state + .ops + .confirm_delete_directory(&directory_path, false, true) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + let entry = state + .audit_log + .append_mutation( + "delete_directory", + vec![directory_path.display().to_string()], + list_file_changes.clone(), + "ok", + Some(preview_id), + ) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; + + let outcome = if staged.is_some() { "staged" } else { "deleted" }; + Ok(Json(DecisionResult { + outcome: outcome.to_string(), + audit_entry_id: entry.id.to_string(), + })) +} diff --git a/backend/src/config.rs b/backend/src/config.rs new file mode 100644 index 0000000..26c9369 --- /dev/null +++ b/backend/src/config.rs @@ -0,0 +1,126 @@ +use std::path::{Component, Path, PathBuf}; + +use serde::{Deserialize, Serialize}; + +use crate::error::{AppError, AppResult}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Config { + pub untagged_root: PathBuf, + pub whitelisted_root: PathBuf, + pub kept_root: PathBuf, + pub trash_root: PathBuf, + pub download_list_path: PathBuf, + pub audit_log_path: PathBuf, + pub state_db_path: PathBuf, + pub read_only_mode: bool, + pub hard_delete_enabled: bool, + pub excluded_patterns: Vec, +} + +impl Config { + pub fn from_env() -> AppResult { + let untagged_root = env_path("UNTAGGED_ROOT")?; + let whitelisted_root = env_path("WHITELISTED_ROOT")?; + let kept_root = env_path("KEPT_ROOT")?; + let trash_root = env_path("TRASH_ROOT")?; + let download_list_path = 
env_path("DOWNLOAD_LIST_PATH")?; + let audit_log_path = env_path("AUDIT_LOG_PATH")?; + let state_db_path = env_path("STATE_DB_PATH")?; + let read_only_mode = env_bool("READ_ONLY_MODE")?; + let hard_delete_enabled = env_bool("HARD_DELETE_ENABLED")?; + let excluded_patterns = std::env::var("EXCLUDED_PATTERNS") + .ok() + .map(|v| { + v.split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect() + }) + .unwrap_or_default(); + let config = Self { + untagged_root, + whitelisted_root, + kept_root, + trash_root, + download_list_path, + audit_log_path, + state_db_path, + read_only_mode, + hard_delete_enabled, + excluded_patterns, + }; + config.validate()?; + Ok(config) + } + + pub fn validate(&self) -> AppResult<()> { + let roots = [ + ("untagged_root", &self.untagged_root), + ("whitelisted_root", &self.whitelisted_root), + ("kept_root", &self.kept_root), + ("trash_root", &self.trash_root), + ]; + for (name, root) in roots.iter() { + if !root.is_absolute() { + return Err(AppError::InvalidConfig(format!( + "{name} must be an absolute path" + ))); + } + } + validate_non_overlapping_roots(&roots)?; + Ok(()) + } +} + +pub fn validate_non_overlapping_roots(roots: &[(&str, &PathBuf)]) -> AppResult<()> { + let mut normalized = Vec::with_capacity(roots.len()); + for (name, root) in roots.iter() { + let cleaned = normalize_path(root); + normalized.push(((*name).to_string(), cleaned)); + } + for i in 0..normalized.len() { + for j in (i + 1)..normalized.len() { + let (name_a, path_a) = &normalized[i]; + let (name_b, path_b) = &normalized[j]; + if path_a == path_b { + return Err(AppError::InvalidConfig(format!( + "{name_a} and {name_b} must be different" + ))); + } + if path_a.starts_with(path_b) || path_b.starts_with(path_a) { + return Err(AppError::InvalidConfig(format!( + "{name_a} and {name_b} must not overlap" + ))); + } + } + } + Ok(()) +} + +fn normalize_path(path: &Path) -> PathBuf { + let mut out = PathBuf::new(); + for component in path.components() 
{ + match component { + Component::CurDir => {} + Component::ParentDir => { + out.pop(); + } + Component::RootDir | Component::Prefix(_) => out.push(component.as_os_str()), + Component::Normal(_) => out.push(component.as_os_str()), + } + } + out +} + +fn env_path(key: &str) -> AppResult { + let value = std::env::var(key) + .map_err(|_| AppError::InvalidConfig(format!("{key} is required")))?; + Ok(PathBuf::from(value)) +} + +fn env_bool(key: &str) -> AppResult { + let value = std::env::var(key) + .map_err(|_| AppError::InvalidConfig(format!("{key} is required")))?; + Ok(matches!(value.as_str(), "1" | "true" | "TRUE" | "yes" | "YES")) +} diff --git a/backend/src/error.rs b/backend/src/error.rs new file mode 100644 index 0000000..42a4e2b --- /dev/null +++ b/backend/src/error.rs @@ -0,0 +1,21 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum AppError { + #[error("invalid configuration: {0}")] + InvalidConfig(String), + #[error("read-only mode enabled")] + ReadOnly, + #[error("path outside configured roots: {0}")] + PathViolation(String), + #[error("whitelisted directory protected: {0}")] + WhitelistProtected(String), + #[error("io error: {0}")] + Io(#[from] std::io::Error), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("sqlx error: {0}")] + Sqlx(#[from] sqlx::Error), +} + +pub type AppResult = Result; diff --git a/backend/src/main.rs b/backend/src/main.rs new file mode 100644 index 0000000..4480119 --- /dev/null +++ b/backend/src/main.rs @@ -0,0 +1,50 @@ +mod api; +mod config; +mod error; +mod services; +mod state; + +use std::net::{IpAddr, SocketAddr}; + +use axum::{routing::get, Router}; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + +use crate::config::Config; +use crate::state::AppState; + +#[tokio::main] +async fn main() -> Result<(), Box> { + tracing_subscriber::registry() + .with(tracing_subscriber::EnvFilter::from_default_env()) + .with(tracing_subscriber::fmt::layer()) + .init(); 
+ + let bind_addr = std::env::var("BIND_ADDR").unwrap_or_else(|_| "127.0.0.1:8080".to_string()); + let socket_addr: SocketAddr = bind_addr.parse()?; + if !is_local_network(socket_addr.ip()) { + return Err("bind address must be loopback or private network".into()); + } + + let config = Config::from_env()?; + let state = AppState::new(config)?; + + let app = Router::new() + .route("/health", get(|| async { "OK" })) + .merge(api::untagged::router(state.clone())) + .merge(api::untagged_delete::router(state.clone())); + + tracing::info!("listening on {}", socket_addr); + let listener = tokio::net::TcpListener::bind(socket_addr).await?; + axum::serve(listener, app).await?; + Ok(()) +} + +fn is_local_network(ip: IpAddr) -> bool { + match ip { + IpAddr::V4(v4) => v4.is_loopback() + || v4.is_private() + || v4.is_link_local() + || v4.is_shared(), + IpAddr::V6(v6) => v6.is_loopback() || v6.is_unique_local() || v6.is_unicast_link_local(), + } +} diff --git a/backend/src/services/audit_log.rs b/backend/src/services/audit_log.rs new file mode 100644 index 0000000..b344e2d --- /dev/null +++ b/backend/src/services/audit_log.rs @@ -0,0 +1,62 @@ +use std::fs::OpenOptions; +use std::io::Write; +use std::path::PathBuf; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::error::AppResult; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AuditEntry { + pub id: Uuid, + pub timestamp: DateTime, + pub action_type: String, + pub affected_paths: Vec, + pub list_file_changes: Vec, + pub outcome: String, + pub preview_id: Option, +} + +#[derive(Clone)] +pub struct AuditLog { + path: PathBuf, +} + +impl AuditLog { + pub fn new(path: PathBuf) -> Self { + Self { path } + } + + pub fn append(&self, entry: &AuditEntry) -> AppResult<()> { + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open(&self.path)?; + let line = serde_json::to_string(entry)?; + writeln!(file, "{line}")?; + Ok(()) + } + + pub fn 
append_mutation( + &self, + action_type: &str, + affected_paths: Vec, + list_file_changes: Vec, + outcome: &str, + preview_id: Option, + ) -> AppResult { + let entry = AuditEntry { + id: Uuid::new_v4(), + timestamp: Utc::now(), + action_type: action_type.to_string(), + affected_paths, + list_file_changes, + outcome: outcome.to_string(), + preview_id, + }; + self.append(&entry)?; + Ok(entry) + } +} diff --git a/backend/src/services/collage_sampler.rs b/backend/src/services/collage_sampler.rs new file mode 100644 index 0000000..495a656 --- /dev/null +++ b/backend/src/services/collage_sampler.rs @@ -0,0 +1,83 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use rand::seq::SliceRandom; +use rand::thread_rng; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::error::AppResult; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MediaItem { + pub id: String, + pub user_directory_id: String, + pub relative_path: String, + pub size_bytes: u64, + pub media_type: String, +} + +#[derive(Clone, Default)] +pub struct CollageSampler; + +impl CollageSampler { + pub fn sample(&self, directory_id: &str, directory: &Path, count: usize) -> AppResult> { + let mut files = Vec::new(); + collect_media_files(directory, &mut files)?; + let mut rng = thread_rng(); + files.shuffle(&mut rng); + let samples = files.into_iter().take(count).map(|path| { + let relative_path = path + .strip_prefix(directory) + .unwrap_or(&path) + .to_string_lossy() + .to_string(); + let size_bytes = fs::metadata(&path).map(|m| m.len()).unwrap_or(0); + let media_type = media_type_for(&path); + MediaItem { + id: Uuid::new_v4().to_string(), + user_directory_id: directory_id.to_string(), + relative_path, + size_bytes, + media_type, + } + }); + Ok(samples.collect()) + } +} + +fn collect_media_files(dir: &Path, out: &mut Vec) -> AppResult<()> { + for entry in fs::read_dir(dir)? 
{ + let entry = entry?; + let path = entry.path(); + let meta = entry.metadata()?; + if meta.is_dir() { + collect_media_files(&path, out)?; + } else if meta.is_file() && is_media_file(&path) { + out.push(path); + } + } + Ok(()) +} + +fn is_media_file(path: &Path) -> bool { + match path.extension().and_then(|e| e.to_str()).map(|e| e.to_lowercase()) { + Some(ext) => matches!( + ext.as_str(), + "jpg" | "jpeg" | "png" | "gif" | "webp" | "bmp" | "mp4" | "webm" | "mkv" | "mov" | "avi" + ), + None => false, + } +} + +fn media_type_for(path: &Path) -> String { + match path.extension().and_then(|e| e.to_str()).map(|e| e.to_lowercase()) { + Some(ext) if matches!(ext.as_str(), "jpg" | "jpeg" | "png" | "gif" | "webp" | "bmp") => { + "image".to_string() + } + Some(ext) if matches!(ext.as_str(), "mp4" | "webm" | "mkv" | "mov" | "avi") => { + "video".to_string() + } + _ => "other".to_string(), + } +} diff --git a/backend/src/services/list_file.rs b/backend/src/services/list_file.rs new file mode 100644 index 0000000..d2dbdeb --- /dev/null +++ b/backend/src/services/list_file.rs @@ -0,0 +1,85 @@ +use std::fs::{self, File}; +use std::io::{BufRead, BufReader, Write}; +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; + +use crate::error::{AppError, AppResult}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DownloadListEntry { + pub raw_line: String, + pub normalized_value: String, + pub matched: bool, +} + +pub fn load_entries(path: &Path) -> AppResult> { + let file = File::open(path)?; + let reader = BufReader::new(file); + let mut entries = Vec::new(); + for line in reader.lines() { + let raw = line?; + let normalized = normalize_value(&raw); + if normalized.is_empty() { + continue; + } + entries.push(DownloadListEntry { + raw_line: raw, + normalized_value: normalized, + matched: false, + }); + } + Ok(entries) +} + +pub fn match_entries(entries: &mut [DownloadListEntry], targets: &[String]) -> Vec { + let normalized_targets: Vec = 
targets.iter().map(|t| normalize_value(t)).collect(); + let mut matched = Vec::new(); + for entry in entries.iter_mut() { + if normalized_targets.iter().any(|t| t == &entry.normalized_value) { + entry.matched = true; + matched.push(entry.clone()); + } + } + matched +} + +pub fn preview_removals(entries: &[DownloadListEntry]) -> (Vec, Vec) { + let mut remaining = Vec::new(); + let mut removed = Vec::new(); + for entry in entries { + if entry.matched { + removed.push(entry.raw_line.clone()); + } else { + remaining.push(entry.raw_line.clone()); + } + } + (remaining, removed) +} + +pub fn apply_removals_atomic(path: &Path, remaining_lines: &[String]) -> AppResult<()> { + let temp_path = temp_path_for(path)?; + { + let mut file = File::create(&temp_path)?; + for line in remaining_lines { + writeln!(file, "{line}")?; + } + } + fs::rename(temp_path, path)?; + Ok(()) +} + +pub fn normalize_value(value: &str) -> String { + value.trim().to_lowercase() +} + +fn temp_path_for(path: &Path) -> AppResult { + let parent = path + .parent() + .ok_or_else(|| AppError::InvalidConfig("list file has no parent".to_string()))?; + let file_name = path + .file_name() + .and_then(|n| n.to_str()) + .ok_or_else(|| AppError::InvalidConfig("list file has invalid name".to_string()))?; + Ok(parent.join(format!("{file_name}.tmp"))) +} diff --git a/backend/src/services/mod.rs b/backend/src/services/mod.rs new file mode 100644 index 0000000..62ec7e2 --- /dev/null +++ b/backend/src/services/mod.rs @@ -0,0 +1,10 @@ +pub mod audit_log; +pub mod collage_sampler; +pub mod list_file; +pub mod ops; +pub mod ops_lock; +pub mod path_guard; +pub mod preview_action; +pub mod read_only; +pub mod state_store; +pub mod untagged_queue; diff --git a/backend/src/services/ops.rs b/backend/src/services/ops.rs new file mode 100644 index 0000000..a3bf8da --- /dev/null +++ b/backend/src/services/ops.rs @@ -0,0 +1,130 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use uuid::Uuid; + +use crate::config::Config; +use 
crate::error::{AppError, AppResult}; +use crate::services::path_guard::PathGuard; + +#[derive(Clone)] +pub struct Ops { + path_guard: PathGuard, + whitelisted_root: PathBuf, + kept_root: PathBuf, + trash_root: PathBuf, + hard_delete_enabled: bool, +} + +impl Ops { + pub fn from_config(config: &Config, path_guard: PathGuard) -> AppResult { + config.validate()?; + Ok(Self { + path_guard, + whitelisted_root: config.whitelisted_root.clone(), + kept_root: config.kept_root.clone(), + trash_root: config.trash_root.clone(), + hard_delete_enabled: config.hard_delete_enabled, + }) + } + + pub fn move_dir(&self, from: &Path, to: &Path) -> AppResult<()> { + self.path_guard.ensure_within_roots(from)?; + self.path_guard.ensure_within_roots(to)?; + self.ensure_not_symlink(from)?; + fs::rename(from, to)?; + Ok(()) + } + + pub fn keep_directory(&self, path: &Path) -> AppResult { + self.path_guard.ensure_within_roots(path)?; + self.ensure_not_symlink(path)?; + let name = path + .file_name() + .and_then(|n| n.to_str()) + .ok_or_else(|| AppError::InvalidConfig("invalid path".to_string()))?; + let destination = self.kept_root.join(name); + self.move_dir(path, &destination)?; + Ok(destination) + } + + pub fn stage_delete_dir(&self, path: &Path) -> AppResult { + self.path_guard.ensure_within_roots(path)?; + self.ensure_not_whitelisted(path)?; + self.ensure_not_symlink(path)?; + let staged_path = self.staged_path(path)?; + fs::rename(path, &staged_path)?; + Ok(staged_path) + } + + pub fn stage_delete_file(&self, path: &Path) -> AppResult { + self.path_guard.ensure_within_roots(path)?; + self.ensure_not_symlink(path)?; + let staged_path = self.staged_path(path)?; + fs::rename(path, &staged_path)?; + Ok(staged_path) + } + + pub fn hard_delete_dir(&self, path: &Path, confirmed: bool) -> AppResult<()> { + self.path_guard.ensure_within_roots(path)?; + self.ensure_not_whitelisted(path)?; + self.ensure_not_symlink(path)?; + self.ensure_hard_delete_allowed(confirmed)?; + 
fs::remove_dir_all(path)?; + Ok(()) + } + + pub fn hard_delete_file(&self, path: &Path, confirmed: bool) -> AppResult<()> { + self.path_guard.ensure_within_roots(path)?; + self.ensure_not_symlink(path)?; + self.ensure_hard_delete_allowed(confirmed)?; + fs::remove_file(path)?; + Ok(()) + } + + pub fn confirm_delete_directory(&self, path: &Path, hard_delete: bool, confirmed: bool) -> AppResult> { + if hard_delete { + self.hard_delete_dir(path, confirmed)?; + Ok(None) + } else { + let staged = self.stage_delete_dir(path)?; + Ok(Some(staged)) + } + } + + fn ensure_not_whitelisted(&self, path: &Path) -> AppResult<()> { + if path.starts_with(&self.whitelisted_root) { + return Err(AppError::WhitelistProtected(path.display().to_string())); + } + Ok(()) + } + + fn ensure_hard_delete_allowed(&self, confirmed: bool) -> AppResult<()> { + if !self.hard_delete_enabled || !confirmed { + return Err(AppError::InvalidConfig( + "hard delete disabled or unconfirmed".to_string(), + )); + } + Ok(()) + } + + fn ensure_not_symlink(&self, path: &Path) -> AppResult<()> { + let metadata = fs::symlink_metadata(path)?; + if metadata.file_type().is_symlink() { + return Err(AppError::PathViolation(format!( + "symlink not allowed: {}", + path.display() + ))); + } + Ok(()) + } + + fn staged_path(&self, path: &Path) -> AppResult { + let name = path + .file_name() + .and_then(|n| n.to_str()) + .ok_or_else(|| AppError::InvalidConfig("invalid path".to_string()))?; + let suffix = Uuid::new_v4(); + Ok(self.trash_root.join(format!("{name}.{suffix}.staged"))) + } +} diff --git a/backend/src/services/ops_lock.rs b/backend/src/services/ops_lock.rs new file mode 100644 index 0000000..a72c135 --- /dev/null +++ b/backend/src/services/ops_lock.rs @@ -0,0 +1,18 @@ +use std::sync::Arc; + +use tokio::sync::{Mutex, MutexGuard}; + +#[derive(Clone, Default)] +pub struct OpsLock { + inner: Arc>, +} + +impl OpsLock { + pub fn new() -> Self { + Self::default() + } + + pub async fn acquire(&self) -> MutexGuard<'_, ()> { 
+ self.inner.lock().await + } +} diff --git a/backend/src/services/path_guard.rs b/backend/src/services/path_guard.rs new file mode 100644 index 0000000..d299972 --- /dev/null +++ b/backend/src/services/path_guard.rs @@ -0,0 +1,48 @@ +use std::path::{Path, PathBuf}; + +use crate::config::Config; +use crate::error::{AppError, AppResult}; + +#[derive(Debug, Clone)] +pub struct PathGuard { + roots: Vec, +} + +impl PathGuard { + pub fn from_config(config: &Config) -> AppResult { + config.validate()?; + Ok(Self { + roots: vec![ + config.untagged_root.clone(), + config.whitelisted_root.clone(), + config.kept_root.clone(), + config.trash_root.clone(), + ], + }) + } + + pub fn ensure_within_roots(&self, path: &Path) -> AppResult<()> { + let normalized = normalize(path); + for root in &self.roots { + let root_norm = normalize(root); + if normalized.starts_with(&root_norm) { + return Ok(()); + } + } + Err(AppError::PathViolation(path.display().to_string())) + } +} + +fn normalize(path: &Path) -> PathBuf { + let mut out = PathBuf::new(); + for component in path.components() { + match component { + std::path::Component::CurDir => {} + std::path::Component::ParentDir => { + out.pop(); + } + _ => out.push(component.as_os_str()), + } + } + out +} diff --git a/backend/src/services/preview_action.rs b/backend/src/services/preview_action.rs new file mode 100644 index 0000000..23488cb --- /dev/null +++ b/backend/src/services/preview_action.rs @@ -0,0 +1,83 @@ +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; + +use chrono::{DateTime, Duration, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::error::{AppError, AppResult}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum PreviewActionType { + DirectoryDelete, + FileDelete, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PreviewAction { + pub id: Uuid, + pub action_type: PreviewActionType, + pub target_paths: Vec, + pub list_file_changes_preview: Vec, + pub created_at: 
DateTime, + pub expires_at: DateTime, +} + +impl PreviewAction { + pub fn new(action_type: PreviewActionType, target_paths: Vec, list_file_changes: Vec) -> Self { + let created_at = Utc::now(); + let expires_at = created_at + Duration::minutes(15); + Self { + id: Uuid::new_v4(), + action_type, + target_paths, + list_file_changes_preview: list_file_changes, + created_at, + expires_at, + } + } + + pub fn is_expired(&self) -> bool { + Utc::now() > self.expires_at + } +} + +#[derive(Clone, Default)] +pub struct PreviewActionStore { + inner: Arc>>, +} + +impl PreviewActionStore { + pub fn new() -> Self { + Self::default() + } + + pub fn create(&self, action: PreviewAction) -> PreviewAction { + let mut guard = self.inner.lock().expect("preview store lock"); + guard.insert(action.id, action.clone()); + action + } + + pub fn get(&self, id: Uuid) -> AppResult { + let guard = self.inner.lock().expect("preview store lock"); + let action = guard + .get(&id) + .cloned() + .ok_or_else(|| AppError::InvalidConfig("preview action not found".to_string()))?; + if action.is_expired() { + return Err(AppError::InvalidConfig("preview action expired".to_string())); + } + Ok(action) + } + + pub fn confirm(&self, id: Uuid) -> AppResult { + let mut guard = self.inner.lock().expect("preview store lock"); + let action = guard + .remove(&id) + .ok_or_else(|| AppError::InvalidConfig("preview action not found".to_string()))?; + if action.is_expired() { + return Err(AppError::InvalidConfig("preview action expired".to_string())); + } + Ok(action) + } +} diff --git a/backend/src/services/read_only.rs b/backend/src/services/read_only.rs new file mode 100644 index 0000000..0df8880 --- /dev/null +++ b/backend/src/services/read_only.rs @@ -0,0 +1,27 @@ +use crate::config::Config; +use crate::error::{AppError, AppResult}; + +#[derive(Debug, Clone)] +pub struct ReadOnlyGuard { + read_only: bool, +} + +impl ReadOnlyGuard { + pub fn new(config: &Config) -> Self { + Self { + read_only: config.read_only_mode, 
+ } + } + + pub fn ensure_writable(&self) -> AppResult<()> { + if self.read_only { + Err(AppError::ReadOnly) + } else { + Ok(()) + } + } + + pub fn ensure_writable_for_operation(&self, _operation: &str) -> AppResult<()> { + self.ensure_writable() + } +} diff --git a/backend/src/services/state_store.rs b/backend/src/services/state_store.rs new file mode 100644 index 0000000..5fb62cd --- /dev/null +++ b/backend/src/services/state_store.rs @@ -0,0 +1,23 @@ +use sqlx::sqlite::{SqliteConnectOptions, SqlitePool}; +use std::str::FromStr; + +use crate::config::Config; +use crate::error::AppResult; + +#[derive(Clone)] +pub struct StateStore { + pool: SqlitePool, +} + +impl StateStore { + pub async fn connect(config: &Config) -> AppResult<Self> { + let options = SqliteConnectOptions::from_str(&config.state_db_path.to_string_lossy())? + .create_if_missing(true); + let pool = SqlitePool::connect_with(options).await?; + Ok(Self { pool }) + } + + pub fn pool(&self) -> &SqlitePool { + &self.pool + } +} diff --git a/backend/src/services/untagged_queue.rs b/backend/src/services/untagged_queue.rs new file mode 100644 index 0000000..0f9583b --- /dev/null +++ b/backend/src/services/untagged_queue.rs @@ -0,0 +1,97 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; + +use crate::config::Config; +use crate::error::{AppError, AppResult}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UntaggedDirectory { + pub id: String, + pub name: String, + pub absolute_path: PathBuf, + pub total_size_bytes: u64, + pub file_count: u64, +} + +#[derive(Clone)] +pub struct UntaggedQueue { + root: PathBuf, +} + +impl UntaggedQueue { + pub fn new(config: &Config) -> AppResult<Self> { + config.validate()?; + Ok(Self { + root: config.untagged_root.clone(), + }) + } + + pub fn next_directory(&self) -> AppResult<Option<UntaggedDirectory>> { + let mut dirs: Vec<PathBuf> = fs::read_dir(&self.root)?
+ .filter_map(|entry| entry.ok()) + .map(|entry| entry.path()) + .filter(|path| path.is_dir()) + .collect(); + dirs.sort(); + let path = match dirs.first() { + Some(path) => path.to_path_buf(), + None => return Ok(None), + }; + let name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or_default() + .to_string(); + let id = relative_id(&self.root, &path)?; + let (total_size_bytes, file_count) = dir_stats(&path)?; + Ok(Some(UntaggedDirectory { + id, + name, + absolute_path: path, + total_size_bytes, + file_count, + })) + } + + pub fn resolve_directory(&self, id: &str) -> AppResult<PathBuf> { + ensure_safe_id(id)?; + Ok(self.root.join(id)) + } +} + +fn dir_stats(path: &Path) -> AppResult<(u64, u64)> { + let mut total_size = 0u64; + let mut file_count = 0u64; + for entry in fs::read_dir(path)? { + let entry = entry?; + let meta = entry.metadata()?; + if meta.is_dir() { + let (size, count) = dir_stats(&entry.path())?; + total_size += size; + file_count += count; + } else if meta.is_file() { + total_size += meta.len(); + file_count += 1; + } + } + Ok((total_size, file_count)) +} + +fn relative_id(root: &Path, path: &Path) -> AppResult<String> { + let rel = path + .strip_prefix(root) + .map_err(|_| AppError::InvalidConfig("path outside untagged root".to_string()))?; + Ok(rel.to_string_lossy().to_string()) +} + +fn ensure_safe_id(id: &str) -> AppResult<()> { + let path = Path::new(id); + for component in path.components() { + if matches!(component, std::path::Component::ParentDir | std::path::Component::RootDir | std::path::Component::Prefix(_)) { + return Err(AppError::InvalidConfig("invalid directory id".to_string())); + } + } + Ok(()) +} diff --git a/backend/src/state.rs b/backend/src/state.rs new file mode 100644 index 0000000..1c6510d --- /dev/null +++ b/backend/src/state.rs @@ -0,0 +1,42 @@ +use std::sync::Arc; + +use crate::config::Config; +use crate::error::AppResult; +use crate::services::{ + audit_log::AuditLog, collage_sampler::CollageSampler, ops::Ops, path_guard::PathGuard, + preview_action::PreviewActionStore,
read_only::ReadOnlyGuard, untagged_queue::UntaggedQueue, +}; + +#[derive(Clone)] +pub struct AppState { + pub config: Arc, + pub path_guard: PathGuard, + pub ops: Ops, + pub read_only: ReadOnlyGuard, + pub audit_log: AuditLog, + pub preview_store: PreviewActionStore, + pub untagged_queue: UntaggedQueue, + pub collage_sampler: CollageSampler, +} + +impl AppState { + pub fn new(config: Config) -> AppResult { + let path_guard = PathGuard::from_config(&config)?; + let ops = Ops::from_config(&config, path_guard.clone())?; + let read_only = ReadOnlyGuard::new(&config); + let audit_log = AuditLog::new(config.audit_log_path.clone()); + let preview_store = PreviewActionStore::new(); + let untagged_queue = UntaggedQueue::new(&config)?; + let collage_sampler = CollageSampler::default(); + Ok(Self { + config: Arc::new(config), + path_guard, + ops, + read_only, + audit_log, + preview_store, + untagged_queue, + collage_sampler, + }) + } +} diff --git a/frontend/.prettierrc b/frontend/.prettierrc new file mode 100644 index 0000000..650cb88 --- /dev/null +++ b/frontend/.prettierrc @@ -0,0 +1,4 @@ +{ + "singleQuote": true, + "semi": true +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..c30536c --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,21 @@ +{ + "name": "archive-curator-frontend", + "version": "0.1.0", + "private": true, + "type": "module", + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "lint": "eslint .", + "format": "prettier --write ." + }, + "devDependencies": { + "@sveltejs/kit": "^2.0.0", + "@sveltejs/vite-plugin-svelte": "^3.0.0", + "eslint": "^9.0.0", + "prettier": "^3.0.0", + "svelte": "^5.0.0", + "vite": "^5.0.0" + } +} diff --git a/frontend/src/components/list-file-matches.svelte b/frontend/src/components/list-file-matches.svelte new file mode 100644 index 0000000..c0afa46 --- /dev/null +++ b/frontend/src/components/list-file-matches.svelte @@ -0,0 +1,55 @@ + + +
+

List-file matches

+ {#if matches.length === 0} +

No matches detected.

+ {:else} +
    + {#each matches as match} +
  • + +
  • + {/each} +
+ {/if} +
+ + diff --git a/frontend/src/components/untagged-controls.svelte b/frontend/src/components/untagged-controls.svelte new file mode 100644 index 0000000..a8acf14 --- /dev/null +++ b/frontend/src/components/untagged-controls.svelte @@ -0,0 +1,41 @@ + + +
+ + + + +
+ + diff --git a/frontend/src/pages/untagged-collage.svelte b/frontend/src/pages/untagged-collage.svelte new file mode 100644 index 0000000..bfbabcb --- /dev/null +++ b/frontend/src/pages/untagged-collage.svelte @@ -0,0 +1,210 @@ + + +
+
+

Untagged Collage Review

+

Curate directories quickly, with staged deletes and list-file previews.

+
+ + {#if collage} +
+

{collage.directory_name}

+
+ {collage.file_count} files + {Math.round(collage.total_size_bytes / (1024 * 1024))} MB +
+
+ +
+ {#each collage.samples as item} +
+
{item.media_type}
+
{item.relative_path}
+
+ {/each} +
+ + + + {#if preview} + + {/if} + {:else} +

{statusMessage}

+ {/if} + + {#if statusMessage && collage} +

{statusMessage}

+ {/if} +
+ + diff --git a/frontend/src/services/untagged_api.ts b/frontend/src/services/untagged_api.ts new file mode 100644 index 0000000..07f7b39 --- /dev/null +++ b/frontend/src/services/untagged_api.ts @@ -0,0 +1,70 @@ +export type MediaItem = { + id: string; + user_directory_id: string; + relative_path: string; + size_bytes: number; + media_type: string; +}; + +export type UntaggedCollage = { + directory_id: string; + directory_name: string; + total_size_bytes: number; + file_count: number; + samples: MediaItem[]; +}; + +export type DeletePreview = { + preview_id: string; + target_paths: string[]; + list_file_changes_preview: string[]; + can_proceed: boolean; + read_only_mode: boolean; +}; + +export type DeleteConfirm = { + preview_id: string; + remove_from_list_file: boolean; + selected_matches?: string[]; +}; + +export type DecisionResult = { + outcome: string; + audit_entry_id: string; +}; + +const API_BASE = import.meta.env.VITE_API_BASE ?? ''; + +async function request(path: string, options?: RequestInit): Promise { + const response = await fetch(`${API_BASE}${path}`, { + headers: { 'Content-Type': 'application/json' }, + ...options, + }); + if (!response.ok) { + throw new Error(`Request failed: ${response.status}`); + } + return (await response.json()) as T; +} + +export function fetchNextUntagged(): Promise { + return request('/directories/untagged/next'); +} + +export function resampleCollage(directoryId: string): Promise { + return request(`/directories/untagged/${directoryId}/resample`, { method: 'POST' }); +} + +export function keepDirectory(directoryId: string): Promise { + return request(`/directories/untagged/${directoryId}/keep`, { method: 'POST' }); +} + +export function previewDelete(directoryId: string): Promise { + return request(`/directories/untagged/${directoryId}/preview-delete`, { method: 'POST' }); +} + +export function confirmDelete(directoryId: string, payload: DeleteConfirm): Promise { + return 
request(`/directories/untagged/${directoryId}/confirm-delete`, { + method: 'POST', + body: JSON.stringify(payload), + }); +} diff --git a/specs/001-archive-curator/tasks.md b/specs/001-archive-curator/tasks.md index 2c9a4ae..03c4fa6 100644 --- a/specs/001-archive-curator/tasks.md +++ b/specs/001-archive-curator/tasks.md @@ -29,11 +29,11 @@ description: "Task list template for feature implementation" **Purpose**: Project initialization and basic structure -- [ ] T001 Create backend and frontend directory structure in `backend/src/` and `frontend/src/` -- [ ] T002 Initialize Rust backend crate in `backend/Cargo.toml` -- [ ] T003 Initialize SvelteKit frontend in `frontend/package.json` -- [ ] T004 [P] Add repository-wide formatting and lint configs in `backend/rustfmt.toml` and `frontend/.prettierrc` -- [ ] T005 Add base README for local run notes in `README.md` +- [X] T001 Create backend and frontend directory structure in `backend/src/` and `frontend/src/` +- [X] T002 Initialize Rust backend crate in `backend/Cargo.toml` +- [X] T003 Initialize SvelteKit frontend in `frontend/package.json` +- [X] T004 [P] Add repository-wide formatting and lint configs in `backend/rustfmt.toml` and `frontend/.prettierrc` +- [X] T005 Add base README for local run notes in `README.md` --- @@ -43,17 +43,17 @@ description: "Task list template for feature implementation" **⚠️ CRITICAL**: No user story work can begin until this phase is complete -- [ ] T006 Implement configuration model and validation in `backend/src/config.rs` -- [ ] T006a Implement root non-overlap validation (fail-fast) in `backend/src/config.rs` -- [ ] T007 Implement root boundary validation helpers in `backend/src/services/path_guard.rs` -- [ ] T008 Implement read-only mode enforcement guard in `backend/src/services/read_only.rs` -- [ ] T009 Implement state storage access layer in `backend/src/services/state_store.rs` -- [ ] T010 Implement audit log append-only writer in `backend/src/services/audit_log.rs` -- [ ] T011 
Implement list-file parser and matcher in `backend/src/services/list_file.rs` -- [ ] T012 Implement preview/confirm action model in `backend/src/services/preview_action.rs` -- [ ] T013 Implement filesystem operations facade in `backend/src/services/ops.rs` -- [ ] T014 Add HTTP server bootstrap and routing in `backend/src/main.rs` -- [ ] T014a Enforce bind address defaults/local-network restriction in `backend/src/main.rs` +- [X] T006 Implement configuration model and validation in `backend/src/config.rs` +- [X] T006a Implement root non-overlap validation (fail-fast) in `backend/src/config.rs` +- [X] T007 Implement root boundary validation helpers in `backend/src/services/path_guard.rs` +- [X] T008 Implement read-only mode enforcement guard in `backend/src/services/read_only.rs` +- [X] T009 Implement state storage access layer in `backend/src/services/state_store.rs` +- [X] T010 Implement audit log append-only writer in `backend/src/services/audit_log.rs` +- [X] T011 Implement list-file parser and matcher in `backend/src/services/list_file.rs` +- [X] T012 Implement preview/confirm action model in `backend/src/services/preview_action.rs` +- [X] T013 Implement filesystem operations facade in `backend/src/services/ops.rs` +- [X] T014 Add HTTP server bootstrap and routing in `backend/src/main.rs` +- [X] T014a Enforce bind address defaults/local-network restriction in `backend/src/main.rs` **Checkpoint**: Foundation ready - user story implementation can now begin in parallel @@ -63,16 +63,16 @@ description: "Task list template for feature implementation" **Purpose**: Enforce constitution safety guarantees before any deletion work -- [ ] T015 Implement global read-only mode block in `backend/src/services/read_only.rs` -- [ ] T016 Enforce root-path boundaries for all filesystem operations in `backend/src/services/path_guard.rs` -- [ ] T017 Implement single-writer guard for destructive operations in `backend/src/services/ops_lock.rs` -- [ ] T018 Implement dry-run preview + 
explicit confirmation flow in `backend/src/services/preview_action.rs` -- [ ] T019 Implement two-stage deletion (trash/staging) in `backend/src/services/ops.rs` -- [ ] T019a Enforce hard-delete disabled by default and require explicit config + confirmation in `backend/src/services/ops.rs` -- [ ] T020 Enforce symlink-safe deletion in `backend/src/services/ops.rs` -- [ ] T021 Append-only audit log for every mutation in `backend/src/services/audit_log.rs` -- [ ] T022 Enforce whitelist protection for directory-level actions in `backend/src/services/ops.rs` -- [ ] T023 Implement list-file edit preview + atomic write in `backend/src/services/list_file.rs` +- [X] T015 Implement global read-only mode block in `backend/src/services/read_only.rs` +- [X] T016 Enforce root-path boundaries for all filesystem operations in `backend/src/services/path_guard.rs` +- [X] T017 Implement single-writer guard for destructive operations in `backend/src/services/ops_lock.rs` +- [X] T018 Implement dry-run preview + explicit confirmation flow in `backend/src/services/preview_action.rs` +- [X] T019 Implement two-stage deletion (trash/staging) in `backend/src/services/ops.rs` +- [X] T019a Enforce hard-delete disabled by default and require explicit config + confirmation in `backend/src/services/ops.rs` +- [X] T020 Enforce symlink-safe deletion in `backend/src/services/ops.rs` +- [X] T021 Append-only audit log for every mutation in `backend/src/services/audit_log.rs` +- [X] T022 Enforce whitelist protection for directory-level actions in `backend/src/services/ops.rs` +- [X] T023 Implement list-file edit preview + atomic write in `backend/src/services/list_file.rs` **Checkpoint**: Safety guarantees verified - destructive workflows can now begin @@ -87,18 +87,18 @@ preview delete with list-file matches, and confirm delete with audit entry ### Implementation for User Story 1 -- [ ] T024 [P] [US1] Implement untagged directory selection service in `backend/src/services/untagged_queue.rs` -- [ ] T025 
[P] [US1] Implement collage sampler in `backend/src/services/collage_sampler.rs` -- [ ] T026 [US1] Implement keep decision (move to kept root) in `backend/src/services/ops.rs` -- [ ] T027 [US1] Implement delete preview for untagged directories in `backend/src/services/preview_action.rs` -- [ ] T028 [US1] Implement delete confirmation for untagged directories in `backend/src/services/ops.rs` -- [ ] T029 [P] [US1] Add API endpoints for untagged review in `backend/src/api/untagged.rs` -- [ ] T030 [P] [US1] Add API endpoints for untagged delete preview/confirm in `backend/src/api/untagged_delete.rs` -- [ ] T030a [P] [US1] Add list-file match selection payload handling in `backend/src/api/untagged_delete.rs` -- [ ] T031 [P] [US1] Create collage UI page in `frontend/src/pages/untagged-collage.svelte` -- [ ] T032 [P] [US1] Create resample and decision controls in `frontend/src/components/untagged-controls.svelte` -- [ ] T032a [P] [US1] Add list-file match selection UI in `frontend/src/components/list-file-matches.svelte` -- [ ] T033 [US1] Wire untagged review API client in `frontend/src/services/untagged_api.ts` +- [X] T024 [P] [US1] Implement untagged directory selection service in `backend/src/services/untagged_queue.rs` +- [X] T025 [P] [US1] Implement collage sampler in `backend/src/services/collage_sampler.rs` +- [X] T026 [US1] Implement keep decision (move to kept root) in `backend/src/services/ops.rs` +- [X] T027 [US1] Implement delete preview for untagged directories in `backend/src/services/preview_action.rs` +- [X] T028 [US1] Implement delete confirmation for untagged directories in `backend/src/services/ops.rs` +- [X] T029 [P] [US1] Add API endpoints for untagged review in `backend/src/api/untagged.rs` +- [X] T030 [P] [US1] Add API endpoints for untagged delete preview/confirm in `backend/src/api/untagged_delete.rs` +- [X] T030a [P] [US1] Add list-file match selection payload handling in `backend/src/api/untagged_delete.rs` +- [X] T031 [P] [US1] Create collage 
UI page in `frontend/src/pages/untagged-collage.svelte` +- [X] T032 [P] [US1] Create resample and decision controls in `frontend/src/components/untagged-controls.svelte` +- [X] T032a [P] [US1] Add list-file match selection UI in `frontend/src/components/list-file-matches.svelte` +- [X] T033 [US1] Wire untagged review API client in `frontend/src/services/untagged_api.ts` **Checkpoint**: User Story 1 fully functional and independently testable