wowaweewa
18
backend/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
[package]
name = "archive-curator-backend"
version = "0.1.0"
edition = "2021"

[dependencies]
axum = "0.7"
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
sqlx = { version = "0.7", features = ["runtime-tokio", "sqlite", "macros"] }
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
thiserror = "1"
anyhow = "1"
uuid = { version = "1", features = ["v4", "serde"] }
rand = "0.8"
1
backend/rustfmt.toml
Normal file
@@ -0,0 +1 @@
edition = "2021"
2
backend/src/api/mod.rs
Normal file
@@ -0,0 +1,2 @@
pub mod untagged;
pub mod untagged_delete;
145
backend/src/api/untagged.rs
Normal file
@@ -0,0 +1,145 @@
use std::fs;
use std::path::Path;

use axum::{
    extract::{Path as AxumPath, State},
    http::StatusCode,
    response::IntoResponse,
    routing::{get, post},
    Json, Router,
};
use serde::{Deserialize, Serialize};

use crate::services::collage_sampler::MediaItem;
use crate::state::AppState;

#[derive(Debug, Serialize, Deserialize)]
pub struct UntaggedCollage {
    pub directory_id: String,
    pub directory_name: String,
    pub total_size_bytes: u64,
    pub file_count: u64,
    pub samples: Vec<MediaItem>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct DecisionResult {
    pub outcome: String,
    pub audit_entry_id: String,
}

pub fn router(state: AppState) -> Router {
    Router::new()
        .route("/directories/untagged/next", get(next_untagged))
        .route(
            "/directories/untagged/:directory_id/resample",
            post(resample_collage),
        )
        .route(
            "/directories/untagged/:directory_id/keep",
            post(keep_directory),
        )
        .with_state(state)
}

async fn next_untagged(State(state): State<AppState>) -> Result<Json<UntaggedCollage>, StatusCode> {
    let directory = state
        .untagged_queue
        .next_directory()
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
        .ok_or(StatusCode::NOT_FOUND)?;

    let samples = state
        .collage_sampler
        .sample(&directory.id, &directory.absolute_path, 12)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    Ok(Json(UntaggedCollage {
        directory_id: directory.id,
        directory_name: directory.name,
        total_size_bytes: directory.total_size_bytes,
        file_count: directory.file_count,
        samples,
    }))
}

async fn resample_collage(
    State(state): State<AppState>,
    AxumPath(directory_id): AxumPath<String>,
) -> Result<Json<UntaggedCollage>, StatusCode> {
    let directory_path = state
        .untagged_queue
        .resolve_directory(&directory_id)
        .map_err(|_| StatusCode::BAD_REQUEST)?;
    let directory_name = directory_path
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or_default()
        .to_string();
    let (total_size_bytes, file_count) = dir_stats(&directory_path)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    let samples = state
        .collage_sampler
        .sample(&directory_id, &directory_path, 12)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    Ok(Json(UntaggedCollage {
        directory_id,
        directory_name,
        total_size_bytes,
        file_count,
        samples,
    }))
}

async fn keep_directory(
    State(state): State<AppState>,
    AxumPath(directory_id): AxumPath<String>,
) -> Result<impl IntoResponse, StatusCode> {
    state
        .read_only
        .ensure_writable()
        .map_err(|_| StatusCode::CONFLICT)?;
    let directory_path = state
        .untagged_queue
        .resolve_directory(&directory_id)
        .map_err(|_| StatusCode::BAD_REQUEST)?;
    let destination = state
        .ops
        .keep_directory(&directory_path)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let entry = state
        .audit_log
        .append_mutation(
            "keep_directory",
            vec![directory_path.display().to_string(), destination.display().to_string()],
            Vec::new(),
            "ok",
            None,
        )
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    Ok(Json(DecisionResult {
        outcome: "kept".to_string(),
        audit_entry_id: entry.id.to_string(),
    }))
}

fn dir_stats(path: &Path) -> std::io::Result<(u64, u64)> {
    let mut total_size = 0u64;
    let mut file_count = 0u64;
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        let meta = entry.metadata()?;
        if meta.is_dir() {
            let (size, count) = dir_stats(&entry.path())?;
            total_size += size;
            file_count += count;
        } else if meta.is_file() {
            total_size += meta.len();
            file_count += 1;
        }
    }
    Ok((total_size, file_count))
}
153
backend/src/api/untagged_delete.rs
Normal file
@@ -0,0 +1,153 @@
use axum::{
    extract::{Path as AxumPath, State},
    http::StatusCode,
    routing::post,
    Json, Router,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::services::list_file::{apply_removals_atomic, load_entries, match_entries, preview_removals};
use crate::services::preview_action::{PreviewAction, PreviewActionType};
use crate::state::AppState;

#[derive(Debug, Serialize, Deserialize)]
pub struct DeletePreview {
    pub preview_id: String,
    pub target_paths: Vec<String>,
    pub list_file_changes_preview: Vec<String>,
    pub can_proceed: bool,
    pub read_only_mode: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct DeleteConfirm {
    pub preview_id: String,
    pub remove_from_list_file: bool,
    #[serde(default)]
    pub selected_matches: Option<Vec<String>>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct DecisionResult {
    pub outcome: String,
    pub audit_entry_id: String,
}

pub fn router(state: AppState) -> Router {
    Router::new()
        .route(
            "/directories/untagged/:directory_id/preview-delete",
            post(preview_delete),
        )
        .route(
            "/directories/untagged/:directory_id/confirm-delete",
            post(confirm_delete),
        )
        .with_state(state)
}

async fn preview_delete(
    State(state): State<AppState>,
    AxumPath(directory_id): AxumPath<String>,
) -> Result<Json<DeletePreview>, StatusCode> {
    state
        .read_only
        .ensure_writable()
        .map_err(|_| StatusCode::CONFLICT)?;

    let directory_path = state
        .untagged_queue
        .resolve_directory(&directory_id)
        .map_err(|_| StatusCode::BAD_REQUEST)?;
    let directory_name = directory_path
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or_default()
        .to_string();

    let mut entries = load_entries(&state.config.download_list_path)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let _ = match_entries(&mut entries, &[directory_name]);
    let (_remaining, removed) = preview_removals(&entries);

    let action = PreviewAction::new(
        PreviewActionType::DirectoryDelete,
        vec![directory_path.display().to_string()],
        removed.clone(),
    );
    let action = state.preview_store.create(action);

    Ok(Json(DeletePreview {
        preview_id: action.id.to_string(),
        target_paths: action.target_paths,
        list_file_changes_preview: action.list_file_changes_preview,
        can_proceed: true,
        read_only_mode: false,
    }))
}

async fn confirm_delete(
    State(state): State<AppState>,
    AxumPath(directory_id): AxumPath<String>,
    Json(payload): Json<DeleteConfirm>,
) -> Result<Json<DecisionResult>, StatusCode> {
    state
        .read_only
        .ensure_writable()
        .map_err(|_| StatusCode::CONFLICT)?;

    let preview_id = Uuid::parse_str(&payload.preview_id)
        .map_err(|_| StatusCode::BAD_REQUEST)?;
    let _preview = state
        .preview_store
        .confirm(preview_id)
        .map_err(|_| StatusCode::BAD_REQUEST)?;

    let directory_path = state
        .untagged_queue
        .resolve_directory(&directory_id)
        .map_err(|_| StatusCode::BAD_REQUEST)?;

    let mut list_file_changes = Vec::new();
    if payload.remove_from_list_file {
        let selected = payload
            .selected_matches
            .unwrap_or_else(|| {
                directory_path
                    .file_name()
                    .and_then(|n| n.to_str())
                    .map(|s| vec![s.to_string()])
                    .unwrap_or_default()
            });
        let mut entries = load_entries(&state.config.download_list_path)
            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
        let _ = match_entries(&mut entries, &selected);
        let (remaining, removed) = preview_removals(&entries);
        apply_removals_atomic(&state.config.download_list_path, &remaining)
            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
        list_file_changes = removed;
    }

    let staged = state
        .ops
        .confirm_delete_directory(&directory_path, false, true)
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    let entry = state
        .audit_log
        .append_mutation(
            "delete_directory",
            vec![directory_path.display().to_string()],
            list_file_changes.clone(),
            "ok",
            Some(preview_id),
        )
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    let outcome = if staged.is_some() { "staged" } else { "deleted" };
    Ok(Json(DecisionResult {
        outcome: outcome.to_string(),
        audit_entry_id: entry.id.to_string(),
    }))
}
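A minimal sketch (not part of this commit) of the request body confirm_delete expects, written as a test module that could be appended to untagged_delete.rs; it uses only the DeleteConfirm struct defined above, and the preview_id value is illustrative.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn delete_confirm_tolerates_missing_selected_matches() {
        // selected_matches is optional thanks to #[serde(default)].
        let body = r#"{"preview_id":"2f6b3c1e-6a2f-4e0e-9b1a-0c2d3e4f5a6b","remove_from_list_file":true}"#;
        let parsed: DeleteConfirm = serde_json::from_str(body).unwrap();
        assert!(parsed.remove_from_list_file);
        assert!(parsed.selected_matches.is_none());
    }
}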
126
backend/src/config.rs
Normal file
@@ -0,0 +1,126 @@
use std::path::{Component, Path, PathBuf};

use serde::{Deserialize, Serialize};

use crate::error::{AppError, AppResult};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub untagged_root: PathBuf,
    pub whitelisted_root: PathBuf,
    pub kept_root: PathBuf,
    pub trash_root: PathBuf,
    pub download_list_path: PathBuf,
    pub audit_log_path: PathBuf,
    pub state_db_path: PathBuf,
    pub read_only_mode: bool,
    pub hard_delete_enabled: bool,
    pub excluded_patterns: Vec<String>,
}

impl Config {
    pub fn from_env() -> AppResult<Self> {
        let untagged_root = env_path("UNTAGGED_ROOT")?;
        let whitelisted_root = env_path("WHITELISTED_ROOT")?;
        let kept_root = env_path("KEPT_ROOT")?;
        let trash_root = env_path("TRASH_ROOT")?;
        let download_list_path = env_path("DOWNLOAD_LIST_PATH")?;
        let audit_log_path = env_path("AUDIT_LOG_PATH")?;
        let state_db_path = env_path("STATE_DB_PATH")?;
        let read_only_mode = env_bool("READ_ONLY_MODE")?;
        let hard_delete_enabled = env_bool("HARD_DELETE_ENABLED")?;
        let excluded_patterns = std::env::var("EXCLUDED_PATTERNS")
            .ok()
            .map(|v| {
                v.split(',')
                    .map(|s| s.trim().to_string())
                    .filter(|s| !s.is_empty())
                    .collect()
            })
            .unwrap_or_default();
        let config = Self {
            untagged_root,
            whitelisted_root,
            kept_root,
            trash_root,
            download_list_path,
            audit_log_path,
            state_db_path,
            read_only_mode,
            hard_delete_enabled,
            excluded_patterns,
        };
        config.validate()?;
        Ok(config)
    }

    pub fn validate(&self) -> AppResult<()> {
        let roots = [
            ("untagged_root", &self.untagged_root),
            ("whitelisted_root", &self.whitelisted_root),
            ("kept_root", &self.kept_root),
            ("trash_root", &self.trash_root),
        ];
        for (name, root) in roots.iter() {
            if !root.is_absolute() {
                return Err(AppError::InvalidConfig(format!(
                    "{name} must be an absolute path"
                )));
            }
        }
        validate_non_overlapping_roots(&roots)?;
        Ok(())
    }
}

pub fn validate_non_overlapping_roots(roots: &[(&str, &PathBuf)]) -> AppResult<()> {
    let mut normalized = Vec::with_capacity(roots.len());
    for (name, root) in roots.iter() {
        let cleaned = normalize_path(root);
        normalized.push(((*name).to_string(), cleaned));
    }
    for i in 0..normalized.len() {
        for j in (i + 1)..normalized.len() {
            let (name_a, path_a) = &normalized[i];
            let (name_b, path_b) = &normalized[j];
            if path_a == path_b {
                return Err(AppError::InvalidConfig(format!(
                    "{name_a} and {name_b} must be different"
                )));
            }
            if path_a.starts_with(path_b) || path_b.starts_with(path_a) {
                return Err(AppError::InvalidConfig(format!(
                    "{name_a} and {name_b} must not overlap"
                )));
            }
        }
    }
    Ok(())
}

fn normalize_path(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => {
                out.pop();
            }
            Component::RootDir | Component::Prefix(_) => out.push(component.as_os_str()),
            Component::Normal(_) => out.push(component.as_os_str()),
        }
    }
    out
}

fn env_path(key: &str) -> AppResult<PathBuf> {
    let value = std::env::var(key)
        .map_err(|_| AppError::InvalidConfig(format!("{key} is required")))?;
    Ok(PathBuf::from(value))
}

fn env_bool(key: &str) -> AppResult<bool> {
    let value = std::env::var(key)
        .map_err(|_| AppError::InvalidConfig(format!("{key} is required")))?;
    Ok(matches!(value.as_str(), "1" | "true" | "TRUE" | "yes" | "YES"))
}
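A minimal sketch (not part of this commit) of how Config::from_env and the overlap check could be exercised from a test module inside config.rs; every environment variable name comes from the code above, while the paths and values are illustrative. Note that it mutates the process environment, so it should not run alongside other env-dependent tests.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn from_env_rejects_overlapping_roots() {
        // All variables are required; the whitelisted root deliberately nests
        // inside the untagged root so validate() should fail.
        std::env::set_var("UNTAGGED_ROOT", "/srv/archive/untagged");
        std::env::set_var("WHITELISTED_ROOT", "/srv/archive/untagged/whitelisted");
        std::env::set_var("KEPT_ROOT", "/srv/archive/kept");
        std::env::set_var("TRASH_ROOT", "/srv/archive/trash");
        std::env::set_var("DOWNLOAD_LIST_PATH", "/srv/archive/download_list.txt");
        std::env::set_var("AUDIT_LOG_PATH", "/srv/archive/audit.log");
        std::env::set_var("STATE_DB_PATH", "/srv/archive/state.db");
        std::env::set_var("READ_ONLY_MODE", "false");
        std::env::set_var("HARD_DELETE_ENABLED", "false");

        assert!(Config::from_env().is_err());
    }
}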
21
backend/src/error.rs
Normal file
@@ -0,0 +1,21 @@
use thiserror::Error;

#[derive(Debug, Error)]
pub enum AppError {
    #[error("invalid configuration: {0}")]
    InvalidConfig(String),
    #[error("read-only mode enabled")]
    ReadOnly,
    #[error("path outside configured roots: {0}")]
    PathViolation(String),
    #[error("whitelisted directory protected: {0}")]
    WhitelistProtected(String),
    #[error("io error: {0}")]
    Io(#[from] std::io::Error),
    #[error("serde json error: {0}")]
    SerdeJson(#[from] serde_json::Error),
    #[error("sqlx error: {0}")]
    Sqlx(#[from] sqlx::Error),
}

pub type AppResult<T> = Result<T, AppError>;
50
backend/src/main.rs
Normal file
@@ -0,0 +1,50 @@
mod api;
mod config;
mod error;
mod services;
mod state;

use std::net::{IpAddr, SocketAddr};

use axum::{routing::get, Router};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

use crate::config::Config;
use crate::state::AppState;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    tracing_subscriber::registry()
        .with(tracing_subscriber::EnvFilter::from_default_env())
        .with(tracing_subscriber::fmt::layer())
        .init();

    let bind_addr = std::env::var("BIND_ADDR").unwrap_or_else(|_| "127.0.0.1:8080".to_string());
    let socket_addr: SocketAddr = bind_addr.parse()?;
    if !is_local_network(socket_addr.ip()) {
        return Err("bind address must be loopback or private network".into());
    }

    let config = Config::from_env()?;
    let state = AppState::new(config)?;

    let app = Router::new()
        .route("/health", get(|| async { "OK" }))
        .merge(api::untagged::router(state.clone()))
        .merge(api::untagged_delete::router(state.clone()));

    tracing::info!("listening on {}", socket_addr);
    let listener = tokio::net::TcpListener::bind(socket_addr).await?;
    axum::serve(listener, app).await?;
    Ok(())
}

fn is_local_network(ip: IpAddr) -> bool {
    match ip {
        IpAddr::V4(v4) => v4.is_loopback()
            || v4.is_private()
            || v4.is_link_local()
            || v4.is_shared(),
        IpAddr::V6(v6) => v6.is_loopback() || v6.is_unique_local() || v6.is_unicast_link_local(),
    }
}
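A small sketch (not in the commit) of the bind-address guard's behaviour, written as a test module for main.rs; the addresses are arbitrary examples.

#[cfg(test)]
mod tests {
    use super::*;
    use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};

    #[test]
    fn bind_guard_accepts_only_local_addresses() {
        assert!(is_local_network(IpAddr::V4(Ipv4Addr::LOCALHOST)));
        assert!(is_local_network(IpAddr::V4(Ipv4Addr::new(192, 168, 1, 10))));
        assert!(!is_local_network(IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8))));
        assert!(!is_local_network(IpAddr::V6(Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1))));
    }
}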
62
backend/src/services/audit_log.rs
Normal file
@@ -0,0 +1,62 @@
use std::fs::OpenOptions;
use std::io::Write;
use std::path::PathBuf;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::error::AppResult;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditEntry {
    pub id: Uuid,
    pub timestamp: DateTime<Utc>,
    pub action_type: String,
    pub affected_paths: Vec<String>,
    pub list_file_changes: Vec<String>,
    pub outcome: String,
    pub preview_id: Option<Uuid>,
}

#[derive(Clone)]
pub struct AuditLog {
    path: PathBuf,
}

impl AuditLog {
    pub fn new(path: PathBuf) -> Self {
        Self { path }
    }

    pub fn append(&self, entry: &AuditEntry) -> AppResult<()> {
        let mut file = OpenOptions::new()
            .create(true)
            .append(true)
            .open(&self.path)?;
        let line = serde_json::to_string(entry)?;
        writeln!(file, "{line}")?;
        Ok(())
    }

    pub fn append_mutation(
        &self,
        action_type: &str,
        affected_paths: Vec<String>,
        list_file_changes: Vec<String>,
        outcome: &str,
        preview_id: Option<Uuid>,
    ) -> AppResult<AuditEntry> {
        let entry = AuditEntry {
            id: Uuid::new_v4(),
            timestamp: Utc::now(),
            action_type: action_type.to_string(),
            affected_paths,
            list_file_changes,
            outcome: outcome.to_string(),
            preview_id,
        };
        self.append(&entry)?;
        Ok(entry)
    }
}
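A minimal sketch (not part of this commit) showing what append_mutation produces, as a test module for audit_log.rs; it writes to a throwaway file under the system temp directory.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn append_mutation_writes_one_json_line() {
        let path = std::env::temp_dir().join("audit_log_sketch.jsonl");
        let _ = std::fs::remove_file(&path);

        let log = AuditLog::new(path.clone());
        let entry = log
            .append_mutation("keep_directory", vec!["/srv/example".to_string()], Vec::new(), "ok", None)
            .unwrap();

        let contents = std::fs::read_to_string(&path).unwrap();
        assert_eq!(contents.lines().count(), 1);
        assert!(contents.contains(&entry.id.to_string()));
    }
}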
83
backend/src/services/collage_sampler.rs
Normal file
@@ -0,0 +1,83 @@
use std::fs;
use std::path::{Path, PathBuf};

use rand::seq::SliceRandom;
use rand::thread_rng;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::error::AppResult;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MediaItem {
    pub id: String,
    pub user_directory_id: String,
    pub relative_path: String,
    pub size_bytes: u64,
    pub media_type: String,
}

#[derive(Clone, Default)]
pub struct CollageSampler;

impl CollageSampler {
    pub fn sample(&self, directory_id: &str, directory: &Path, count: usize) -> AppResult<Vec<MediaItem>> {
        let mut files = Vec::new();
        collect_media_files(directory, &mut files)?;
        let mut rng = thread_rng();
        files.shuffle(&mut rng);
        let samples = files.into_iter().take(count).map(|path| {
            let relative_path = path
                .strip_prefix(directory)
                .unwrap_or(&path)
                .to_string_lossy()
                .to_string();
            let size_bytes = fs::metadata(&path).map(|m| m.len()).unwrap_or(0);
            let media_type = media_type_for(&path);
            MediaItem {
                id: Uuid::new_v4().to_string(),
                user_directory_id: directory_id.to_string(),
                relative_path,
                size_bytes,
                media_type,
            }
        });
        Ok(samples.collect())
    }
}

fn collect_media_files(dir: &Path, out: &mut Vec<PathBuf>) -> AppResult<()> {
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        let meta = entry.metadata()?;
        if meta.is_dir() {
            collect_media_files(&path, out)?;
        } else if meta.is_file() && is_media_file(&path) {
            out.push(path);
        }
    }
    Ok(())
}

fn is_media_file(path: &Path) -> bool {
    match path.extension().and_then(|e| e.to_str()).map(|e| e.to_lowercase()) {
        Some(ext) => matches!(
            ext.as_str(),
            "jpg" | "jpeg" | "png" | "gif" | "webp" | "bmp" | "mp4" | "webm" | "mkv" | "mov" | "avi"
        ),
        None => false,
    }
}

fn media_type_for(path: &Path) -> String {
    match path.extension().and_then(|e| e.to_str()).map(|e| e.to_lowercase()) {
        Some(ext) if matches!(ext.as_str(), "jpg" | "jpeg" | "png" | "gif" | "webp" | "bmp") => {
            "image".to_string()
        }
        Some(ext) if matches!(ext.as_str(), "mp4" | "webm" | "mkv" | "mov" | "avi") => {
            "video".to_string()
        }
        _ => "other".to_string(),
    }
}
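A short sketch (not in the commit) of the extension-based classification used above, as a test module for collage_sampler.rs.

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    #[test]
    fn extensions_are_classified_case_insensitively() {
        assert!(is_media_file(Path::new("clip.MP4")));
        assert!(!is_media_file(Path::new("notes.txt")));
        assert_eq!(media_type_for(Path::new("photo.jpeg")), "image");
        assert_eq!(media_type_for(Path::new("clip.webm")), "video");
        assert_eq!(media_type_for(Path::new("README")), "other");
    }
}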
85
backend/src/services/list_file.rs
Normal file
@@ -0,0 +1,85 @@
use std::fs::{self, File};
use std::io::{BufRead, BufReader, Write};
use std::path::{Path, PathBuf};

use serde::{Deserialize, Serialize};

use crate::error::{AppError, AppResult};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DownloadListEntry {
    pub raw_line: String,
    pub normalized_value: String,
    pub matched: bool,
}

pub fn load_entries(path: &Path) -> AppResult<Vec<DownloadListEntry>> {
    let file = File::open(path)?;
    let reader = BufReader::new(file);
    let mut entries = Vec::new();
    for line in reader.lines() {
        let raw = line?;
        let normalized = normalize_value(&raw);
        if normalized.is_empty() {
            continue;
        }
        entries.push(DownloadListEntry {
            raw_line: raw,
            normalized_value: normalized,
            matched: false,
        });
    }
    Ok(entries)
}

pub fn match_entries(entries: &mut [DownloadListEntry], targets: &[String]) -> Vec<DownloadListEntry> {
    let normalized_targets: Vec<String> = targets.iter().map(|t| normalize_value(t)).collect();
    let mut matched = Vec::new();
    for entry in entries.iter_mut() {
        if normalized_targets.iter().any(|t| t == &entry.normalized_value) {
            entry.matched = true;
            matched.push(entry.clone());
        }
    }
    matched
}

pub fn preview_removals(entries: &[DownloadListEntry]) -> (Vec<String>, Vec<String>) {
    let mut remaining = Vec::new();
    let mut removed = Vec::new();
    for entry in entries {
        if entry.matched {
            removed.push(entry.raw_line.clone());
        } else {
            remaining.push(entry.raw_line.clone());
        }
    }
    (remaining, removed)
}

pub fn apply_removals_atomic(path: &Path, remaining_lines: &[String]) -> AppResult<()> {
    let temp_path = temp_path_for(path)?;
    {
        let mut file = File::create(&temp_path)?;
        for line in remaining_lines {
            writeln!(file, "{line}")?;
        }
    }
    fs::rename(temp_path, path)?;
    Ok(())
}

pub fn normalize_value(value: &str) -> String {
    value.trim().to_lowercase()
}

fn temp_path_for(path: &Path) -> AppResult<PathBuf> {
    let parent = path
        .parent()
        .ok_or_else(|| AppError::InvalidConfig("list file has no parent".to_string()))?;
    let file_name = path
        .file_name()
        .and_then(|n| n.to_str())
        .ok_or_else(|| AppError::InvalidConfig("list file has invalid name".to_string()))?;
    Ok(parent.join(format!("{file_name}.tmp")))
}
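A minimal sketch (not part of this commit) of the intended load → match → preview → apply flow, as a test module for list_file.rs; the list file lives under the system temp directory and its contents are illustrative.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn match_preview_then_apply_round_trip() {
        let list_path = std::env::temp_dir().join("download_list_sketch.txt");
        std::fs::write(&list_path, "Keep Me\nDrop Me\n").unwrap();

        let mut entries = load_entries(&list_path).unwrap();
        let matched = match_entries(&mut entries, &["drop me".to_string()]);
        assert_eq!(matched.len(), 1);

        let (remaining, removed) = preview_removals(&entries);
        assert_eq!(removed, vec!["Drop Me".to_string()]);

        // Rewrites the file through a temp file + rename, as apply_removals_atomic does.
        apply_removals_atomic(&list_path, &remaining).unwrap();
        assert_eq!(std::fs::read_to_string(&list_path).unwrap(), "Keep Me\n");
    }
}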
10
backend/src/services/mod.rs
Normal file
@@ -0,0 +1,10 @@
pub mod audit_log;
pub mod collage_sampler;
pub mod list_file;
pub mod ops;
pub mod ops_lock;
pub mod path_guard;
pub mod preview_action;
pub mod read_only;
pub mod state_store;
pub mod untagged_queue;
130
backend/src/services/ops.rs
Normal file
@@ -0,0 +1,130 @@
use std::fs;
use std::path::{Path, PathBuf};

use uuid::Uuid;

use crate::config::Config;
use crate::error::{AppError, AppResult};
use crate::services::path_guard::PathGuard;

#[derive(Clone)]
pub struct Ops {
    path_guard: PathGuard,
    whitelisted_root: PathBuf,
    kept_root: PathBuf,
    trash_root: PathBuf,
    hard_delete_enabled: bool,
}

impl Ops {
    pub fn from_config(config: &Config, path_guard: PathGuard) -> AppResult<Self> {
        config.validate()?;
        Ok(Self {
            path_guard,
            whitelisted_root: config.whitelisted_root.clone(),
            kept_root: config.kept_root.clone(),
            trash_root: config.trash_root.clone(),
            hard_delete_enabled: config.hard_delete_enabled,
        })
    }

    pub fn move_dir(&self, from: &Path, to: &Path) -> AppResult<()> {
        self.path_guard.ensure_within_roots(from)?;
        self.path_guard.ensure_within_roots(to)?;
        self.ensure_not_symlink(from)?;
        fs::rename(from, to)?;
        Ok(())
    }

    pub fn keep_directory(&self, path: &Path) -> AppResult<PathBuf> {
        self.path_guard.ensure_within_roots(path)?;
        self.ensure_not_symlink(path)?;
        let name = path
            .file_name()
            .and_then(|n| n.to_str())
            .ok_or_else(|| AppError::InvalidConfig("invalid path".to_string()))?;
        let destination = self.kept_root.join(name);
        self.move_dir(path, &destination)?;
        Ok(destination)
    }

    pub fn stage_delete_dir(&self, path: &Path) -> AppResult<PathBuf> {
        self.path_guard.ensure_within_roots(path)?;
        self.ensure_not_whitelisted(path)?;
        self.ensure_not_symlink(path)?;
        let staged_path = self.staged_path(path)?;
        fs::rename(path, &staged_path)?;
        Ok(staged_path)
    }

    pub fn stage_delete_file(&self, path: &Path) -> AppResult<PathBuf> {
        self.path_guard.ensure_within_roots(path)?;
        self.ensure_not_symlink(path)?;
        let staged_path = self.staged_path(path)?;
        fs::rename(path, &staged_path)?;
        Ok(staged_path)
    }

    pub fn hard_delete_dir(&self, path: &Path, confirmed: bool) -> AppResult<()> {
        self.path_guard.ensure_within_roots(path)?;
        self.ensure_not_whitelisted(path)?;
        self.ensure_not_symlink(path)?;
        self.ensure_hard_delete_allowed(confirmed)?;
        fs::remove_dir_all(path)?;
        Ok(())
    }

    pub fn hard_delete_file(&self, path: &Path, confirmed: bool) -> AppResult<()> {
        self.path_guard.ensure_within_roots(path)?;
        self.ensure_not_symlink(path)?;
        self.ensure_hard_delete_allowed(confirmed)?;
        fs::remove_file(path)?;
        Ok(())
    }

    pub fn confirm_delete_directory(&self, path: &Path, hard_delete: bool, confirmed: bool) -> AppResult<Option<PathBuf>> {
        if hard_delete {
            self.hard_delete_dir(path, confirmed)?;
            Ok(None)
        } else {
            let staged = self.stage_delete_dir(path)?;
            Ok(Some(staged))
        }
    }

    fn ensure_not_whitelisted(&self, path: &Path) -> AppResult<()> {
        if path.starts_with(&self.whitelisted_root) {
            return Err(AppError::WhitelistProtected(path.display().to_string()));
        }
        Ok(())
    }

    fn ensure_hard_delete_allowed(&self, confirmed: bool) -> AppResult<()> {
        if !self.hard_delete_enabled || !confirmed {
            return Err(AppError::InvalidConfig(
                "hard delete disabled or unconfirmed".to_string(),
            ));
        }
        Ok(())
    }

    fn ensure_not_symlink(&self, path: &Path) -> AppResult<()> {
        let metadata = fs::symlink_metadata(path)?;
        if metadata.file_type().is_symlink() {
            return Err(AppError::PathViolation(format!(
                "symlink not allowed: {}",
                path.display()
            )));
        }
        Ok(())
    }

    fn staged_path(&self, path: &Path) -> AppResult<PathBuf> {
        let name = path
            .file_name()
            .and_then(|n| n.to_str())
            .ok_or_else(|| AppError::InvalidConfig("invalid path".to_string()))?;
        let suffix = Uuid::new_v4();
        Ok(self.trash_root.join(format!("{name}.{suffix}.staged")))
    }
}
18
backend/src/services/ops_lock.rs
Normal file
@@ -0,0 +1,18 @@
use std::sync::Arc;

use tokio::sync::{Mutex, MutexGuard};

#[derive(Clone, Default)]
pub struct OpsLock {
    inner: Arc<Mutex<()>>,
}

impl OpsLock {
    pub fn new() -> Self {
        Self::default()
    }

    pub async fn acquire(&self) -> MutexGuard<'_, ()> {
        self.inner.lock().await
    }
}
48
backend/src/services/path_guard.rs
Normal file
@@ -0,0 +1,48 @@
use std::path::{Path, PathBuf};

use crate::config::Config;
use crate::error::{AppError, AppResult};

#[derive(Debug, Clone)]
pub struct PathGuard {
    roots: Vec<PathBuf>,
}

impl PathGuard {
    pub fn from_config(config: &Config) -> AppResult<Self> {
        config.validate()?;
        Ok(Self {
            roots: vec![
                config.untagged_root.clone(),
                config.whitelisted_root.clone(),
                config.kept_root.clone(),
                config.trash_root.clone(),
            ],
        })
    }

    pub fn ensure_within_roots(&self, path: &Path) -> AppResult<()> {
        let normalized = normalize(path);
        for root in &self.roots {
            let root_norm = normalize(root);
            if normalized.starts_with(&root_norm) {
                return Ok(());
            }
        }
        Err(AppError::PathViolation(path.display().to_string()))
    }
}

fn normalize(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            std::path::Component::CurDir => {}
            std::path::Component::ParentDir => {
                out.pop();
            }
            _ => out.push(component.as_os_str()),
        }
    }
    out
}
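A small sketch (not in the commit) of how the root containment check behaves, as a test module for path_guard.rs; the Config paths are made up but chosen so that validate() passes.

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::Config;
    use std::path::{Path, PathBuf};

    fn example_config() -> Config {
        Config {
            untagged_root: PathBuf::from("/srv/archive/untagged"),
            whitelisted_root: PathBuf::from("/srv/archive/whitelisted"),
            kept_root: PathBuf::from("/srv/archive/kept"),
            trash_root: PathBuf::from("/srv/archive/trash"),
            download_list_path: PathBuf::from("/srv/archive/download_list.txt"),
            audit_log_path: PathBuf::from("/srv/archive/audit.log"),
            state_db_path: PathBuf::from("/srv/archive/state.db"),
            read_only_mode: false,
            hard_delete_enabled: false,
            excluded_patterns: Vec::new(),
        }
    }

    #[test]
    fn traversal_outside_the_roots_is_rejected() {
        let guard = PathGuard::from_config(&example_config()).unwrap();
        assert!(guard
            .ensure_within_roots(Path::new("/srv/archive/untagged/some-dir"))
            .is_ok());
        // ".." components are collapsed before the prefix check, so this escapes the roots.
        assert!(guard
            .ensure_within_roots(Path::new("/srv/archive/untagged/../../etc/passwd"))
            .is_err());
    }
}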
83
backend/src/services/preview_action.rs
Normal file
@@ -0,0 +1,83 @@
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::error::{AppError, AppResult};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PreviewActionType {
    DirectoryDelete,
    FileDelete,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PreviewAction {
    pub id: Uuid,
    pub action_type: PreviewActionType,
    pub target_paths: Vec<String>,
    pub list_file_changes_preview: Vec<String>,
    pub created_at: DateTime<Utc>,
    pub expires_at: DateTime<Utc>,
}

impl PreviewAction {
    pub fn new(action_type: PreviewActionType, target_paths: Vec<String>, list_file_changes: Vec<String>) -> Self {
        let created_at = Utc::now();
        let expires_at = created_at + Duration::minutes(15);
        Self {
            id: Uuid::new_v4(),
            action_type,
            target_paths,
            list_file_changes_preview: list_file_changes,
            created_at,
            expires_at,
        }
    }

    pub fn is_expired(&self) -> bool {
        Utc::now() > self.expires_at
    }
}

#[derive(Clone, Default)]
pub struct PreviewActionStore {
    inner: Arc<Mutex<HashMap<Uuid, PreviewAction>>>,
}

impl PreviewActionStore {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn create(&self, action: PreviewAction) -> PreviewAction {
        let mut guard = self.inner.lock().expect("preview store lock");
        guard.insert(action.id, action.clone());
        action
    }

    pub fn get(&self, id: Uuid) -> AppResult<PreviewAction> {
        let guard = self.inner.lock().expect("preview store lock");
        let action = guard
            .get(&id)
            .cloned()
            .ok_or_else(|| AppError::InvalidConfig("preview action not found".to_string()))?;
        if action.is_expired() {
            return Err(AppError::InvalidConfig("preview action expired".to_string()));
        }
        Ok(action)
    }

    pub fn confirm(&self, id: Uuid) -> AppResult<PreviewAction> {
        let mut guard = self.inner.lock().expect("preview store lock");
        let action = guard
            .remove(&id)
            .ok_or_else(|| AppError::InvalidConfig("preview action not found".to_string()))?;
        if action.is_expired() {
            return Err(AppError::InvalidConfig("preview action expired".to_string()));
        }
        Ok(action)
    }
}
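A minimal sketch (not part of this commit) of the preview/confirm handshake the store supports, as a test module for preview_action.rs; the target path and list line are illustrative.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn confirm_consumes_the_preview() {
        let store = PreviewActionStore::new();
        let action = store.create(PreviewAction::new(
            PreviewActionType::DirectoryDelete,
            vec!["/srv/archive/untagged/example".to_string()],
            vec!["example list line".to_string()],
        ));

        // The first confirm succeeds and removes the entry ...
        assert!(store.confirm(action.id).is_ok());
        // ... so a replayed confirm with the same id is rejected.
        assert!(store.confirm(action.id).is_err());
    }
}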
27
backend/src/services/read_only.rs
Normal file
@@ -0,0 +1,27 @@
use crate::config::Config;
use crate::error::{AppError, AppResult};

#[derive(Debug, Clone)]
pub struct ReadOnlyGuard {
    read_only: bool,
}

impl ReadOnlyGuard {
    pub fn new(config: &Config) -> Self {
        Self {
            read_only: config.read_only_mode,
        }
    }

    pub fn ensure_writable(&self) -> AppResult<()> {
        if self.read_only {
            Err(AppError::ReadOnly)
        } else {
            Ok(())
        }
    }

    pub fn ensure_writable_for_operation(&self, _operation: &str) -> AppResult<()> {
        self.ensure_writable()
    }
}
23
backend/src/services/state_store.rs
Normal file
@@ -0,0 +1,23 @@
use sqlx::sqlite::{SqliteConnectOptions, SqlitePool};
use std::str::FromStr;

use crate::config::Config;
use crate::error::AppResult;

#[derive(Clone)]
pub struct StateStore {
    pool: SqlitePool,
}

impl StateStore {
    pub async fn connect(config: &Config) -> AppResult<Self> {
        let options = SqliteConnectOptions::from_str(&config.state_db_path.to_string_lossy())?
            .create_if_missing(true);
        let pool = SqlitePool::connect_with(options).await?;
        Ok(Self { pool })
    }

    pub fn pool(&self) -> &SqlitePool {
        &self.pool
    }
}
97
backend/src/services/untagged_queue.rs
Normal file
@@ -0,0 +1,97 @@
use std::fs;
use std::path::{Path, PathBuf};

use serde::{Deserialize, Serialize};

use crate::config::Config;
use crate::error::{AppError, AppResult};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UntaggedDirectory {
    pub id: String,
    pub name: String,
    pub absolute_path: PathBuf,
    pub total_size_bytes: u64,
    pub file_count: u64,
}

#[derive(Clone)]
pub struct UntaggedQueue {
    root: PathBuf,
}

impl UntaggedQueue {
    pub fn new(config: &Config) -> AppResult<Self> {
        config.validate()?;
        Ok(Self {
            root: config.untagged_root.clone(),
        })
    }

    pub fn next_directory(&self) -> AppResult<Option<UntaggedDirectory>> {
        let mut dirs: Vec<PathBuf> = fs::read_dir(&self.root)?
            .filter_map(|entry| entry.ok())
            .map(|entry| entry.path())
            .filter(|path| path.is_dir())
            .collect();
        dirs.sort();
        let path = match dirs.first() {
            Some(path) => path.to_path_buf(),
            None => return Ok(None),
        };
        let name = path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or_default()
            .to_string();
        let id = relative_id(&self.root, &path)?;
        let (total_size_bytes, file_count) = dir_stats(&path)?;
        Ok(Some(UntaggedDirectory {
            id,
            name,
            absolute_path: path,
            total_size_bytes,
            file_count,
        }))
    }

    pub fn resolve_directory(&self, id: &str) -> AppResult<PathBuf> {
        ensure_safe_id(id)?;
        Ok(self.root.join(id))
    }
}

fn dir_stats(path: &Path) -> AppResult<(u64, u64)> {
    let mut total_size = 0u64;
    let mut file_count = 0u64;
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        let meta = entry.metadata()?;
        if meta.is_dir() {
            let (size, count) = dir_stats(&entry.path())?;
            total_size += size;
            file_count += count;
        } else if meta.is_file() {
            total_size += meta.len();
            file_count += 1;
        }
    }
    Ok((total_size, file_count))
}

fn relative_id(root: &Path, path: &Path) -> AppResult<String> {
    let rel = path
        .strip_prefix(root)
        .map_err(|_| AppError::InvalidConfig("path outside untagged root".to_string()))?;
    Ok(rel.to_string_lossy().to_string())
}

fn ensure_safe_id(id: &str) -> AppResult<()> {
    let path = Path::new(id);
    for component in path.components() {
        if matches!(component, std::path::Component::ParentDir) {
            return Err(AppError::InvalidConfig("invalid directory id".to_string()));
        }
    }
    Ok(())
}
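A short sketch (not in the commit) of the directory-id safety check, as a test module for untagged_queue.rs; the example ids are arbitrary.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parent_dir_components_are_rejected() {
        assert!(ensure_safe_id("some-directory").is_ok());
        assert!(ensure_safe_id("nested/child").is_ok());
        assert!(ensure_safe_id("../outside").is_err());
    }
}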
42
backend/src/state.rs
Normal file
@@ -0,0 +1,42 @@
use std::sync::Arc;

use crate::config::Config;
use crate::error::AppResult;
use crate::services::{
    audit_log::AuditLog, collage_sampler::CollageSampler, ops::Ops, path_guard::PathGuard,
    preview_action::PreviewActionStore, read_only::ReadOnlyGuard, untagged_queue::UntaggedQueue,
};

#[derive(Clone)]
pub struct AppState {
    pub config: Arc<Config>,
    pub path_guard: PathGuard,
    pub ops: Ops,
    pub read_only: ReadOnlyGuard,
    pub audit_log: AuditLog,
    pub preview_store: PreviewActionStore,
    pub untagged_queue: UntaggedQueue,
    pub collage_sampler: CollageSampler,
}

impl AppState {
    pub fn new(config: Config) -> AppResult<Self> {
        let path_guard = PathGuard::from_config(&config)?;
        let ops = Ops::from_config(&config, path_guard.clone())?;
        let read_only = ReadOnlyGuard::new(&config);
        let audit_log = AuditLog::new(config.audit_log_path.clone());
        let preview_store = PreviewActionStore::new();
        let untagged_queue = UntaggedQueue::new(&config)?;
        let collage_sampler = CollageSampler::default();
        Ok(Self {
            config: Arc::new(config),
            path_guard,
            ops,
            read_only,
            audit_log,
            preview_store,
            untagged_queue,
            collage_sampler,
        })
    }
}