Compare commits
44 Commits
75492c3b61
...
00024cdc0e
| Author | SHA1 | Date | |
|---|---|---|---|
| 00024cdc0e | |||
|
|
ce353f8b49 | ||
|
|
d4fbdf9273 | ||
|
|
c68a6a7d01 | ||
|
|
948fe591dc | ||
|
|
e5abcced74 | ||
|
|
3eb3d977f9 | ||
|
|
ce0b692aee | ||
|
|
cd8ce33f5e | ||
|
|
3700ba02ea | ||
|
|
f85ae4d417 | ||
|
|
ca81729c50 | ||
|
|
b48adacf51 | ||
|
|
c52ac86739 | ||
|
|
681fa0903b | ||
|
|
5dc1b0bca5 | ||
|
|
010df31455 | ||
|
|
48020b6f42 | ||
|
|
b0e22af242 | ||
|
|
4a2f3f5fdc | ||
|
|
2ebeb7e748 | ||
|
|
07f4ea8277 | ||
|
|
d40139822d | ||
|
|
cac1db0ed7 | ||
|
|
8d161589a2 | ||
|
|
8bf5150eae | ||
|
|
37b25689ff | ||
|
|
b55ac51fe2 | ||
|
|
4c94793aba | ||
|
|
3f6f8b2eff | ||
|
|
eddc0390ba | ||
|
|
011204188d | ||
|
|
a95a4c091a | ||
|
|
da4892cc30 | ||
|
|
56b5f8c67c | ||
|
|
ff4a2625f3 | ||
|
|
6d3eaf16f9 | ||
|
|
136fa200ec | ||
|
|
6dea130421 | ||
|
|
1bc657e0fd | ||
|
|
58f463867e | ||
|
|
d5a1819e2f | ||
|
|
b59a3d23aa | ||
|
|
43bd1aebf0 |
58
.cursor/rules/specify-rules.mdc
Normal file
58
.cursor/rules/specify-rules.mdc
Normal file
@@ -0,0 +1,58 @@
|
||||
# webref Development Guidelines
|
||||
|
||||
Auto-generated from all feature plans. Last updated: 2025-11-01
|
||||
|
||||
## Constitutional Principles
|
||||
|
||||
This project follows a formal constitution (`.specify/memory/constitution.md`). All development work MUST align with these principles:
|
||||
|
||||
1. **Code Quality & Maintainability** - Clear, maintainable code with proper typing
|
||||
2. **Testing Discipline** - ≥80% coverage, automated testing required
|
||||
3. **User Experience Consistency** - Intuitive, accessible interfaces
|
||||
4. **Performance & Efficiency** - Performance-first design with bounded resources
|
||||
|
||||
Reference the full constitution for detailed requirements and enforcement mechanisms.
|
||||
|
||||
## Active Technologies
|
||||
|
||||
- (001-reference-board-viewer)
|
||||
|
||||
## Project Structure
|
||||
|
||||
```text
|
||||
src/
|
||||
tests/
|
||||
```
|
||||
|
||||
## Commands
|
||||
|
||||
# Add commands for
|
||||
|
||||
## Code Style
|
||||
|
||||
: Follow standard conventions
|
||||
|
||||
### Constitutional Requirements
|
||||
|
||||
All code MUST meet these standards (per Principle 1):
|
||||
- Linter passing (zero errors/warnings)
|
||||
- Type hints on all public APIs
|
||||
- Clear single responsibilities (SRP)
|
||||
- Explicit constants (no magic numbers)
|
||||
- Comments explaining "why" not "what"
|
||||
|
||||
## Testing Standards
|
||||
|
||||
Per Constitutional Principle 2:
|
||||
- Minimum 80% test coverage required
|
||||
- Unit tests for all public functions
|
||||
- Integration tests for component interactions
|
||||
- Edge cases and error paths explicitly tested
|
||||
- Tests are deterministic, isolated, and fast (<1s unit, <10s integration)
|
||||
|
||||
## Recent Changes
|
||||
|
||||
- 001-reference-board-viewer: Added
|
||||
|
||||
<!-- MANUAL ADDITIONS START -->
|
||||
<!-- MANUAL ADDITIONS END -->
|
||||
@@ -1 +0,0 @@
|
||||
/nix/store/fw0ymh1b25q3x97wskwkl0n67d73irj1-nix-shell-env
|
||||
File diff suppressed because it is too large
Load Diff
34
.editorconfig
Normal file
34
.editorconfig
Normal file
@@ -0,0 +1,34 @@
|
||||
# EditorConfig for Reference Board Viewer
|
||||
# https://editorconfig.org
|
||||
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.{js,jsx,ts,tsx,svelte}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.{py}]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
max_line_length = 100
|
||||
|
||||
[*.{json,yaml,yml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.{md,markdown}]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[Makefile]
|
||||
indent_style = tab
|
||||
|
||||
[*.nix]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
193
.gitea/workflows/ci.yml
Normal file
193
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,193 @@
|
||||
name: CI/CD Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop, '001-*']
|
||||
pull_request:
|
||||
branches: [main, develop]
|
||||
|
||||
jobs:
|
||||
# NixOS VM integration tests (PostgreSQL + MinIO native services)
|
||||
nixos-vm-tests:
|
||||
name: VM Test - ${{ matrix.test }}
|
||||
runs-on: nixos
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- backend-integration
|
||||
- full-stack
|
||||
- performance
|
||||
- security
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Configure Attic cache
|
||||
run: |
|
||||
attic login lan http://127.0.0.1:2343 ${{ secrets.ATTIC_TOKEN }}
|
||||
|
||||
- name: Run NixOS VM test
|
||||
run: |
|
||||
echo "Running ${{ matrix.test }} test..."
|
||||
nix build .#checks.x86_64-linux.${{ matrix.test }} --quiet --accept-flake-config
|
||||
|
||||
- name: Push to Attic cache
|
||||
if: success()
|
||||
run: |
|
||||
nix build .#checks.x86_64-linux.${{ matrix.test }} --print-out-paths | attic push lan:webref --stdin
|
||||
|
||||
# Backend linting (using Nix flake app)
|
||||
lint-backend:
|
||||
name: Backend Linting
|
||||
runs-on: nixos
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run backend linting
|
||||
run: nix run .#lint-backend
|
||||
|
||||
# Frontend linting (using Nix flake app)
|
||||
lint-frontend:
|
||||
name: Frontend Linting
|
||||
runs-on: nixos
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
clean: true
|
||||
|
||||
- name: Install dependencies and run linting
|
||||
run: |
|
||||
# Clean any previous build artifacts
|
||||
rm -rf /tmp/frontend-build
|
||||
|
||||
# Copy frontend to /tmp to avoid noexec issues with DynamicUser
|
||||
cp -r frontend /tmp/frontend-build
|
||||
|
||||
# Verify lib files are present
|
||||
echo "Verifying frontend lib files..."
|
||||
ls -la /tmp/frontend-build/src/lib/ || echo "WARNING: lib directory not found!"
|
||||
|
||||
# Install dependencies in executable location
|
||||
nix develop --quiet --command bash -c "
|
||||
cd /tmp/frontend-build
|
||||
npm ci --prefer-offline --no-audit
|
||||
|
||||
# Run linting from the executable location
|
||||
echo '🔍 Linting frontend TypeScript/Svelte code...'
|
||||
npm run lint
|
||||
npx prettier --check src/
|
||||
npm run check
|
||||
"
|
||||
|
||||
# Cleanup
|
||||
rm -rf /tmp/frontend-build
|
||||
|
||||
# Nix flake check (needs Nix)
|
||||
nix-check:
|
||||
name: Nix Flake Check
|
||||
runs-on: nixos
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Flake check
|
||||
run: nix flake check --quiet --accept-flake-config
|
||||
|
||||
# Unit tests - DISABLED until tests are written (Phase 23)
|
||||
# unit-tests:
|
||||
# name: Unit Tests
|
||||
# runs-on: nixos
|
||||
#
|
||||
# steps:
|
||||
# - name: Checkout repository
|
||||
# uses: actions/checkout@v4
|
||||
#
|
||||
# - name: Configure Attic cache
|
||||
# run: attic login lan http://127.0.0.1:2343 ${{ secrets.ATTIC_TOKEN }}
|
||||
#
|
||||
# - name: Backend unit tests
|
||||
# run: |
|
||||
# nix develop --command bash -c "
|
||||
# cd backend &&
|
||||
# pytest tests/unit/ -v \
|
||||
# --cov=app \
|
||||
# --cov-report=xml \
|
||||
# --cov-report=term-missing
|
||||
# "
|
||||
#
|
||||
# - name: Frontend - Install deps
|
||||
# run: |
|
||||
# nix develop --command bash -c "
|
||||
# cd frontend &&
|
||||
# npm ci --prefer-offline --no-audit
|
||||
# "
|
||||
#
|
||||
# - name: Frontend unit tests
|
||||
# run: nix develop --command bash -c "cd frontend && npm run test:coverage"
|
||||
|
||||
# Build packages - DISABLED until packages are properly configured
|
||||
# TODO: Enable when backend pyproject.toml is set up and frontend package is ready
|
||||
# build:
|
||||
# name: Build Packages
|
||||
# runs-on: nixos
|
||||
#
|
||||
# steps:
|
||||
# - name: Checkout repository
|
||||
# uses: actions/checkout@v4
|
||||
#
|
||||
# - name: Configure Attic cache
|
||||
# run: attic login lan http://127.0.0.1:2343 ${{ secrets.ATTIC_TOKEN }}
|
||||
#
|
||||
# - name: Build backend package
|
||||
# run: |
|
||||
# echo "Building backend package..."
|
||||
# nix build .#backend --quiet --accept-flake-config
|
||||
#
|
||||
# - name: Push backend to Attic
|
||||
# if: success()
|
||||
# run: nix build .#backend --print-out-paths | attic push lan:webref --stdin
|
||||
#
|
||||
# - name: Build frontend package
|
||||
# run: |
|
||||
# echo "Building frontend package..."
|
||||
# nix build .#frontend --quiet --accept-flake-config
|
||||
#
|
||||
# - name: Push frontend to Attic
|
||||
# if: success()
|
||||
# run: nix build .#frontend --print-out-paths | attic push lan:webref --stdin
|
||||
|
||||
# Summary
|
||||
summary:
|
||||
name: CI Summary
|
||||
runs-on: nixos
|
||||
needs: [nixos-vm-tests, lint-backend, lint-frontend, nix-check]
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: Check results
|
||||
run: |
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "📊 CI Pipeline Results"
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "NixOS VMs: ${{ needs.nixos-vm-tests.result }}"
|
||||
echo "Backend Lint: ${{ needs.lint-backend.result }}"
|
||||
echo "Frontend Lint: ${{ needs.lint-frontend.result }}"
|
||||
echo "Nix Check: ${{ needs.nix-check.result }}"
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
|
||||
if [[ "${{ needs.nixos-vm-tests.result }}" != "success" ]] || \
|
||||
[[ "${{ needs.lint-backend.result }}" != "success" ]] || \
|
||||
[[ "${{ needs.lint-frontend.result }}" != "success" ]] || \
|
||||
[[ "${{ needs.nix-check.result }}" != "success" ]]; then
|
||||
echo "❌ Pipeline Failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ All Checks Passed"
|
||||
101
.gitignore
vendored
Normal file
101
.gitignore
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
**/lib/
|
||||
**/lib64/
|
||||
!frontend/src/lib/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
.tox/
|
||||
.hypothesis/
|
||||
|
||||
# Virtual environments
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
.venv
|
||||
|
||||
# IDEs
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
|
||||
# Nix
|
||||
result
|
||||
result-*
|
||||
|
||||
# Node.js / JavaScript
|
||||
node_modules/
|
||||
pnpm-lock.yaml
|
||||
yarn.lock
|
||||
.npm
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
dist/
|
||||
.svelte-kit/
|
||||
|
||||
# Environment files
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
*.log
|
||||
|
||||
# Database
|
||||
pgdata/
|
||||
*.sql
|
||||
*.db
|
||||
*.sqlite
|
||||
|
||||
# Development data directories (Nix services)
|
||||
.dev-data/
|
||||
|
||||
# Development VM
|
||||
.dev-vm/
|
||||
|
||||
# MinIO / Storage (legacy Docker)
|
||||
minio-data/
|
||||
|
||||
# Backend specific
|
||||
backend/.uv/
|
||||
backend/alembic/versions/__pycache__/
|
||||
|
||||
# Frontend specific
|
||||
frontend/build/
|
||||
frontend/.svelte-kit/
|
||||
frontend/dist/
|
||||
|
||||
# Project specific
|
||||
.specify/plans/*
|
||||
.specify/specs/*
|
||||
.specify/tasks/*
|
||||
!.specify/plans/.gitkeep
|
||||
!.specify/specs/.gitkeep
|
||||
!.specify/tasks/.gitkeep
|
||||
|
||||
# Keep template and memory directories
|
||||
!.specify/templates/
|
||||
!.specify/memory/
|
||||
|
||||
.direnv/
|
||||
54
.pre-commit-config.yaml
Normal file
54
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,54 @@
|
||||
repos:
|
||||
# Python linting and formatting
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.7.0
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix]
|
||||
files: ^backend/
|
||||
- id: ruff-format
|
||||
files: ^backend/
|
||||
|
||||
# JavaScript/TypeScript linting
|
||||
- repo: https://github.com/pre-commit/mirrors-eslint
|
||||
rev: v9.15.0
|
||||
hooks:
|
||||
- id: eslint
|
||||
files: \.(js|ts|svelte)$
|
||||
args: [--fix]
|
||||
additional_dependencies:
|
||||
- eslint@8.56.0
|
||||
- eslint-plugin-svelte@2.35.1
|
||||
- eslint-config-prettier@9.1.0
|
||||
- "@typescript-eslint/eslint-plugin@7.0.0"
|
||||
- "@typescript-eslint/parser@7.0.0"
|
||||
|
||||
# Prettier for formatting
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v4.0.0-alpha.8
|
||||
hooks:
|
||||
- id: prettier
|
||||
files: \.(js|ts|json|yaml|yml|md|svelte)$
|
||||
additional_dependencies:
|
||||
- prettier@3.2.5
|
||||
- prettier-plugin-svelte@3.1.2
|
||||
|
||||
# General file checks
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- id: check-yaml
|
||||
- id: check-json
|
||||
- id: check-added-large-files
|
||||
args: [--maxkb=5000]
|
||||
- id: check-merge-conflict
|
||||
- id: detect-private-key
|
||||
|
||||
# Nix formatting
|
||||
- repo: https://github.com/nix-community/nixpkgs-fmt
|
||||
rev: v1.3.0
|
||||
hooks:
|
||||
- id: nixpkgs-fmt
|
||||
|
||||
105
.specify/README.md
Normal file
105
.specify/README.md
Normal file
@@ -0,0 +1,105 @@
|
||||
# .specify Directory
|
||||
|
||||
This directory contains the project's governance framework and specification templates.
|
||||
|
||||
## Purpose
|
||||
|
||||
The `.specify` system provides:
|
||||
- **Constitutional Principles:** Binding rules for all development work
|
||||
- **Template Library:** Standardized formats for plans, specs, and tasks
|
||||
- **Consistency Enforcement:** Automated checks that code adheres to principles
|
||||
- **Living Documentation:** Evolving guidance that grows with the project
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
.specify/
|
||||
├── memory/
|
||||
│ └── constitution.md # Project constitution (versioned)
|
||||
├── templates/
|
||||
│ ├── plan-template.md # Feature planning template
|
||||
│ ├── spec-template.md # Technical specification template
|
||||
│ ├── tasks-template.md # Task tracking template
|
||||
│ └── commands/
|
||||
│ └── constitution.md # Constitution amendment workflow
|
||||
├── plans/ # (Create as needed) Feature plans
|
||||
├── specs/ # (Create as needed) Specifications
|
||||
└── tasks/ # (Create as needed) Task lists
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
### constitution.md
|
||||
The project's binding principles covering:
|
||||
- Code quality and maintainability
|
||||
- Testing discipline (≥80% coverage)
|
||||
- User experience consistency
|
||||
- Performance and efficiency
|
||||
|
||||
**Version:** 1.0.0 (ratified 2025-11-02)
|
||||
|
||||
Every code change must align with constitutional principles. The constitution can be amended via pull request with proper versioning and impact analysis.
|
||||
|
||||
### Templates
|
||||
|
||||
#### plan-template.md
|
||||
Use for high-level feature planning. Includes:
|
||||
- Objectives and scope definition
|
||||
- Constitution alignment checklist
|
||||
- Technical approach and architecture
|
||||
- Implementation phases
|
||||
- Risk assessment
|
||||
|
||||
#### spec-template.md
|
||||
Use for detailed technical specifications. Includes:
|
||||
- Functional and non-functional requirements
|
||||
- Design and data models
|
||||
- API specifications with types
|
||||
- Testing strategy
|
||||
- Performance analysis
|
||||
|
||||
#### tasks-template.md
|
||||
Use for sprint/milestone task tracking. Includes:
|
||||
- Tasks categorized by constitutional principle
|
||||
- Estimation guidelines (S/M/L/XL)
|
||||
- Completion checklist
|
||||
- Blocked task tracking
|
||||
|
||||
## Workflow
|
||||
|
||||
1. **Plan** → Create plan from template, verify constitutional alignment
|
||||
2. **Specify** → Write detailed spec with testable requirements
|
||||
3. **Implement** → Break down into tasks, execute with continuous testing
|
||||
4. **Review** → Code review validates principle adherence
|
||||
5. **Deploy** → Merge and monitor success metrics
|
||||
|
||||
## Constitutional Compliance
|
||||
|
||||
Every plan, spec, and task must explicitly address:
|
||||
|
||||
✅ **Code Quality:** How will this maintain/improve maintainability?
|
||||
✅ **Testing:** What tests ensure correctness? (≥80% coverage)
|
||||
✅ **User Experience:** How does this impact users positively?
|
||||
✅ **Performance:** What are the performance implications?
|
||||
|
||||
## Amending the System
|
||||
|
||||
### To amend the constitution:
|
||||
1. PR to `constitution.md` with rationale
|
||||
2. Increment version (MAJOR.MINOR.PATCH)
|
||||
3. Update dependent templates
|
||||
4. Add Sync Impact Report
|
||||
|
||||
### To improve templates:
|
||||
1. PR to template file with changes
|
||||
2. Explain benefit and backward compatibility
|
||||
3. Update examples if structure changes
|
||||
|
||||
## Version History
|
||||
|
||||
- **1.0.0 (2025-11-02):** Initial specification system established with four core principles
|
||||
|
||||
## Questions?
|
||||
|
||||
Refer to the [main README](../README.md) or the constitution itself for detailed guidance.
|
||||
|
||||
@@ -1,50 +1,147 @@
|
||||
# [PROJECT_NAME] Constitution
|
||||
<!-- Example: Spec Constitution, TaskFlow Constitution, etc. -->
|
||||
<!--
|
||||
Sync Impact Report - Version 1.0.0 (2025-11-02)
|
||||
═══════════════════════════════════════════════════════════════
|
||||
|
||||
VERSION CHANGE: Initial constitution establishment (v1.0.0)
|
||||
|
||||
MODIFIED PRINCIPLES: N/A (Initial creation)
|
||||
|
||||
ADDED SECTIONS:
|
||||
✓ Principle 1: Code Quality & Maintainability
|
||||
✓ Principle 2: Testing Discipline
|
||||
✓ Principle 3: User Experience Consistency
|
||||
✓ Principle 4: Performance & Efficiency
|
||||
✓ Governance & Amendment Process
|
||||
|
||||
REMOVED SECTIONS: N/A
|
||||
|
||||
TEMPLATE SYNC STATUS:
|
||||
✅ .specify/templates/plan-template.md - Created with constitution alignment
|
||||
✅ .specify/templates/spec-template.md - Created with principle checks
|
||||
✅ .specify/templates/tasks-template.md - Created with principle-driven categories
|
||||
✅ .specify/templates/commands/constitution.md - Created with amendment workflow
|
||||
|
||||
DEFERRED ITEMS: None
|
||||
|
||||
RATIONALE: Initial constitution establishing foundational principles for code quality,
|
||||
testing standards, user experience, and performance requirements for the webref project.
|
||||
═══════════════════════════════════════════════════════════════
|
||||
-->
|
||||
|
||||
# Project Constitution
|
||||
|
||||
**Project Name:** webref
|
||||
**Constitution Version:** 1.0.0
|
||||
**Ratification Date:** 2025-11-02
|
||||
**Last Amended:** 2025-11-02
|
||||
|
||||
## Preamble
|
||||
|
||||
This constitution establishes the foundational principles and governance framework for the webref project. These principles are binding on all development work, architectural decisions, and contributions. They exist to ensure consistent quality, maintainability, user satisfaction, and technical excellence across the project lifecycle.
|
||||
|
||||
## Core Principles
|
||||
|
||||
### [PRINCIPLE_1_NAME]
|
||||
<!-- Example: I. Library-First -->
|
||||
[PRINCIPLE_1_DESCRIPTION]
|
||||
<!-- Example: Every feature starts as a standalone library; Libraries must be self-contained, independently testable, documented; Clear purpose required - no organizational-only libraries -->
|
||||
### Principle 1: Code Quality & Maintainability
|
||||
|
||||
### [PRINCIPLE_2_NAME]
|
||||
<!-- Example: II. CLI Interface -->
|
||||
[PRINCIPLE_2_DESCRIPTION]
|
||||
<!-- Example: Every library exposes functionality via CLI; Text in/out protocol: stdin/args → stdout, errors → stderr; Support JSON + human-readable formats -->
|
||||
**Declaration:** All code MUST be written with clarity, consistency, and long-term maintainability as primary concerns.
|
||||
|
||||
### [PRINCIPLE_3_NAME]
|
||||
<!-- Example: III. Test-First (NON-NEGOTIABLE) -->
|
||||
[PRINCIPLE_3_DESCRIPTION]
|
||||
<!-- Example: TDD mandatory: Tests written → User approved → Tests fail → Then implement; Red-Green-Refactor cycle strictly enforced -->
|
||||
**Requirements:**
|
||||
- Code MUST follow established style guides and formatting conventions (enforced via linters)
|
||||
- Functions and modules MUST have clear, single responsibilities (SRP)
|
||||
- Complex logic MUST be accompanied by inline comments explaining the "why"
|
||||
- Magic numbers and hard-coded values MUST be replaced with named constants
|
||||
- Code duplication beyond trivial patterns (>5 lines) MUST be refactored into reusable components
|
||||
- Type hints MUST be used for all public APIs and function signatures in Python
|
||||
- Dependencies MUST be explicitly versioned and regularly audited for security and compatibility
|
||||
|
||||
### [PRINCIPLE_4_NAME]
|
||||
<!-- Example: IV. Integration Testing -->
|
||||
[PRINCIPLE_4_DESCRIPTION]
|
||||
<!-- Example: Focus areas requiring integration tests: New library contract tests, Contract changes, Inter-service communication, Shared schemas -->
|
||||
**Rationale:** Code is read far more often than written. Maintainable code reduces cognitive load, accelerates feature development, simplifies debugging, and enables confident refactoring. Poor code quality compounds into technical debt that eventually paralyzes development velocity.
|
||||
|
||||
### [PRINCIPLE_5_NAME]
|
||||
<!-- Example: V. Observability, VI. Versioning & Breaking Changes, VII. Simplicity -->
|
||||
[PRINCIPLE_5_DESCRIPTION]
|
||||
<!-- Example: Text I/O ensures debuggability; Structured logging required; Or: MAJOR.MINOR.BUILD format; Or: Start simple, YAGNI principles -->
|
||||
**Enforcement:** Pre-commit hooks, CI linting checks, code review requirements.
|
||||
|
||||
## [SECTION_2_NAME]
|
||||
<!-- Example: Additional Constraints, Security Requirements, Performance Standards, etc. -->
|
||||
### Principle 2: Testing Discipline
|
||||
|
||||
[SECTION_2_CONTENT]
|
||||
<!-- Example: Technology stack requirements, compliance standards, deployment policies, etc. -->
|
||||
**Declaration:** All functionality MUST be validated through automated tests before merging to main branches.
|
||||
|
||||
## [SECTION_3_NAME]
|
||||
<!-- Example: Development Workflow, Review Process, Quality Gates, etc. -->
|
||||
**Requirements:**
|
||||
- Unit tests MUST cover all public functions and methods (minimum 80% coverage)
|
||||
- Integration tests MUST verify interactions between components and external dependencies
|
||||
- Edge cases and error paths MUST have explicit test coverage
|
||||
- Tests MUST be deterministic, isolated, and fast (unit tests <1s, integration <10s)
|
||||
- Test failures MUST block merges via CI/CD pipelines
|
||||
- Critical user flows MUST have end-to-end tests when applicable
|
||||
- Regression tests MUST be added for every bug fix
|
||||
- Test code MUST maintain the same quality standards as production code
|
||||
|
||||
[SECTION_3_CONTENT]
|
||||
<!-- Example: Code review requirements, testing gates, deployment approval process, etc. -->
|
||||
**Rationale:** Automated testing is the only scalable way to ensure correctness, prevent regressions, and enable confident refactoring. Manual testing alone is insufficient for maintaining quality as complexity grows. Tests serve as living documentation of expected behavior.
|
||||
|
||||
## Governance
|
||||
<!-- Example: Constitution supersedes all other practices; Amendments require documentation, approval, migration plan -->
|
||||
**Enforcement:** CI/CD pipeline gates, coverage reporting, code review checklists.
|
||||
|
||||
[GOVERNANCE_RULES]
|
||||
<!-- Example: All PRs/reviews must verify compliance; Complexity must be justified; Use [GUIDANCE_FILE] for runtime development guidance -->
|
||||
### Principle 3: User Experience Consistency
|
||||
|
||||
**Version**: [CONSTITUTION_VERSION] | **Ratified**: [RATIFICATION_DATE] | **Last Amended**: [LAST_AMENDED_DATE]
|
||||
<!-- Example: Version: 2.1.1 | Ratified: 2025-06-13 | Last Amended: 2025-07-16 -->
|
||||
**Declaration:** User-facing interfaces MUST provide consistent, intuitive, and accessible experiences across all touchpoints.
|
||||
|
||||
**Requirements:**
|
||||
- UI components MUST follow established design systems and patterns
|
||||
- Error messages MUST be clear, actionable, and user-friendly (no raw exceptions)
|
||||
- User workflows MUST be tested for common use cases before release
|
||||
- Response times for user-initiated actions MUST be <200ms or provide feedback
|
||||
- Accessibility standards (WCAG 2.1 AA minimum) MUST be met for all interfaces
|
||||
- API responses MUST follow consistent schemas and error formats
|
||||
- Documentation MUST be written for users, not developers (unless internal APIs)
|
||||
- Breaking changes to user-facing features MUST include migration paths
|
||||
|
||||
**Rationale:** Inconsistent experiences create friction, confusion, and frustration. Users develop mental models based on patterns; violations of these patterns increase cognitive load and reduce trust. Quality user experience is a competitive differentiator and retention driver.
|
||||
|
||||
**Enforcement:** Design review, usability testing, accessibility audits, API contract testing.
|
||||
|
||||
### Principle 4: Performance & Efficiency
|
||||
|
||||
**Declaration:** All systems MUST be designed and implemented with performance as a first-class concern, not an afterthought.
|
||||
|
||||
**Requirements:**
|
||||
- Performance budgets MUST be established for critical operations (API response time, page load, query execution)
|
||||
- Algorithmic complexity MUST be considered and optimized for expected data scales (prefer O(n log n) over O(n²) for large datasets)
|
||||
- Database queries MUST be indexed appropriately and avoid N+1 problems
|
||||
- Memory usage MUST be bounded and monitored (no unbounded caches or collection growth)
|
||||
- Network requests MUST be batched, cached, or minimized where possible
|
||||
- Performance regressions >10% MUST be investigated and justified before merge
|
||||
- Profiling MUST be performed for suspected bottlenecks before optimization
|
||||
- Resource-intensive operations MUST be logged and monitored in production
|
||||
|
||||
**Rationale:** Performance directly impacts user satisfaction, operational costs, and system scalability. Poor performance compounds exponentially with scale. Retrofitting performance is far more expensive than designing for it upfront. Users abandon slow systems.
|
||||
|
||||
**Enforcement:** Performance benchmarks in CI, profiling tools, load testing, production monitoring.
|
||||
|
||||
## Governance & Amendment Process
|
||||
|
||||
### Amendment Procedure
|
||||
|
||||
1. Amendments MUST be proposed via pull request to `.specify/memory/constitution.md`
|
||||
2. Proposals MUST include rationale and impact analysis on existing code/templates
|
||||
3. Amendments require explicit approval from project maintainers
|
||||
4. Version number MUST be incremented following semantic versioning:
|
||||
- **MAJOR:** Backward-incompatible principle changes (removal, fundamental redefinition)
|
||||
- **MINOR:** New principles, sections, or material expansions to existing guidance
|
||||
- **PATCH:** Clarifications, wording improvements, non-semantic refinements
|
||||
5. All dependent templates MUST be updated before amendment merge
|
||||
6. A Sync Impact Report MUST be prepended to the constitution file
|
||||
|
||||
### Compliance Review
|
||||
|
||||
- Code reviews MUST verify compliance with constitutional principles
|
||||
- CI/CD pipelines MUST enforce automated compliance checks where possible
|
||||
- Quarterly audits SHOULD assess adherence and identify systematic violations
|
||||
- Principle violations MUST be documented and justified if accepted as technical debt
|
||||
|
||||
### Living Document Commitment
|
||||
|
||||
This constitution is a living document. As the project evolves, principles may need refinement to reflect new challenges, technologies, or organizational priorities. However, the core commitment to quality, testing, user experience, and performance remains immutable.
|
||||
|
||||
## Ratified By
|
||||
|
||||
Project maintainers of webref on 2025-11-02.
|
||||
|
||||
---
|
||||
|
||||
*Version History:*
|
||||
- **v1.0.0 (2025-11-02):** Initial constitution ratified with four foundational principles
|
||||
|
||||
0
.specify/plans/.gitkeep
Normal file
0
.specify/plans/.gitkeep
Normal file
0
.specify/specs/.gitkeep
Normal file
0
.specify/specs/.gitkeep
Normal file
0
.specify/tasks/.gitkeep
Normal file
0
.specify/tasks/.gitkeep
Normal file
@@ -2,6 +2,17 @@
|
||||
|
||||
Auto-generated from all feature plans. Last updated: [DATE]
|
||||
|
||||
## Constitutional Principles
|
||||
|
||||
This project follows a formal constitution (`.specify/memory/constitution.md`). All development work MUST align with these principles:
|
||||
|
||||
1. **Code Quality & Maintainability** - Clear, maintainable code with proper typing
|
||||
2. **Testing Discipline** - ≥80% coverage, automated testing required
|
||||
3. **User Experience Consistency** - Intuitive, accessible interfaces
|
||||
4. **Performance & Efficiency** - Performance-first design with bounded resources
|
||||
|
||||
Reference the full constitution for detailed requirements and enforcement mechanisms.
|
||||
|
||||
## Active Technologies
|
||||
|
||||
[EXTRACTED FROM ALL PLAN.MD FILES]
|
||||
@@ -20,6 +31,24 @@ Auto-generated from all feature plans. Last updated: [DATE]
|
||||
|
||||
[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE]
|
||||
|
||||
### Constitutional Requirements
|
||||
|
||||
All code MUST meet these standards (per Principle 1):
|
||||
- Linter passing (zero errors/warnings)
|
||||
- Type hints on all public APIs
|
||||
- Clear single responsibilities (SRP)
|
||||
- Explicit constants (no magic numbers)
|
||||
- Comments explaining "why" not "what"
|
||||
|
||||
## Testing Standards
|
||||
|
||||
Per Constitutional Principle 2:
|
||||
- Minimum 80% test coverage required
|
||||
- Unit tests for all public functions
|
||||
- Integration tests for component interactions
|
||||
- Edge cases and error paths explicitly tested
|
||||
- Tests are deterministic, isolated, and fast (<1s unit, <10s integration)
|
||||
|
||||
## Recent Changes
|
||||
|
||||
[LAST 3 FEATURES AND WHAT THEY ADDED]
|
||||
|
||||
@@ -20,6 +20,15 @@
|
||||
============================================================================
|
||||
-->
|
||||
|
||||
## Constitutional Compliance Check
|
||||
|
||||
Before proceeding, verify alignment with constitutional principles:
|
||||
|
||||
- [ ] **Code Quality (Principle 1):** Design maintains/improves maintainability
|
||||
- [ ] **Testing (Principle 2):** ≥80% coverage plan established
|
||||
- [ ] **UX Consistency (Principle 3):** User impact documented and positive
|
||||
- [ ] **Performance (Principle 4):** Performance budget and complexity analyzed
|
||||
|
||||
## [Category 1]
|
||||
|
||||
- [ ] CHK001 First checklist item with clear action
|
||||
@@ -32,6 +41,16 @@
|
||||
- [ ] CHK005 Item with specific criteria
|
||||
- [ ] CHK006 Final item in this category
|
||||
|
||||
## Pre-Merge Validation
|
||||
|
||||
Per constitutional requirements:
|
||||
|
||||
- [ ] All tests passing (≥80% coverage maintained)
|
||||
- [ ] Linter/type checker passing (zero errors)
|
||||
- [ ] Code review approved with principle verification
|
||||
- [ ] Documentation updated
|
||||
- [ ] Performance benchmarks met (if applicable)
|
||||
|
||||
## Notes
|
||||
|
||||
- Check items off as completed: `[x]`
|
||||
|
||||
81
.specify/templates/commands/constitution.md
Normal file
81
.specify/templates/commands/constitution.md
Normal file
@@ -0,0 +1,81 @@
|
||||
---
|
||||
description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync
|
||||
---
|
||||
|
||||
## User Input
|
||||
|
||||
```text
|
||||
[User's request for constitutional changes]
|
||||
```
|
||||
|
||||
You **MUST** consider the user input before proceeding (if not empty).
|
||||
|
||||
## Outline
|
||||
|
||||
You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.
|
||||
|
||||
Follow this execution flow:
|
||||
|
||||
1. Load the existing constitution template at `.specify/memory/constitution.md`.
|
||||
- Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`.
|
||||
**IMPORTANT**: The user might require fewer or more principles than the ones used in the template. If a number is specified, respect it while still following the general template structure. You will update the doc accordingly.
|
||||
|
||||
2. Collect/derive values for placeholders:
|
||||
- If user input (conversation) supplies a value, use it.
|
||||
- Otherwise infer from existing repo context (README, docs, prior constitution versions if embedded).
|
||||
- For governance dates: `RATIFICATION_DATE` is the original adoption date (if unknown ask or mark TODO), `LAST_AMENDED_DATE` is today if changes are made, otherwise keep previous.
|
||||
- `CONSTITUTION_VERSION` must increment according to semantic versioning rules:
|
||||
- MAJOR: Backward incompatible governance/principle removals or redefinitions.
|
||||
- MINOR: New principle/section added or materially expanded guidance.
|
||||
- PATCH: Clarifications, wording, typo fixes, non-semantic refinements.
|
||||
- If version bump type ambiguous, propose reasoning before finalizing.
|
||||
|
||||
3. Draft the updated constitution content:
|
||||
- Replace every placeholder with concrete text (no bracketed tokens left except intentionally retained template slots that the project has chosen not to define yet—explicitly justify any left).
|
||||
- Preserve the heading hierarchy; template comments may be removed once their placeholders are replaced, unless they still add clarifying guidance.
|
||||
- Ensure each Principle section: succinct name line, paragraph (or bullet list) capturing non‑negotiable rules, explicit rationale if not obvious.
|
||||
- Ensure Governance section lists amendment procedure, versioning policy, and compliance review expectations.
|
||||
|
||||
4. Consistency propagation checklist (convert prior checklist into active validations):
|
||||
- Read `.specify/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles.
|
||||
- Read `.specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints.
|
||||
- Read `.specify/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline).
|
||||
- Read each command file in `.specify/templates/commands/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required.
|
||||
- Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to any principles that changed.
|
||||
|
||||
5. Produce a Sync Impact Report (prepend as an HTML comment at top of the constitution file after update):
|
||||
- Version change: old → new
|
||||
- List of modified principles (old title → new title if renamed)
|
||||
- Added sections
|
||||
- Removed sections
|
||||
- Templates requiring updates (✅ updated / ⚠ pending) with file paths
|
||||
- Follow-up TODOs if any placeholders intentionally deferred.
|
||||
|
||||
6. Validation before final output:
|
||||
- No remaining unexplained bracket tokens.
|
||||
- Version line matches report.
|
||||
- Dates ISO format YYYY-MM-DD.
|
||||
- Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD rationale where appropriate).
|
||||
|
||||
7. Write the completed constitution back to `.specify/memory/constitution.md` (overwrite).
|
||||
|
||||
8. Output a final summary to the user with:
|
||||
- New version and bump rationale.
|
||||
- Any files flagged for manual follow-up.
|
||||
- Suggested commit message (e.g., `docs: amend constitution to vX.Y.Z (principle additions + governance update)`).
|
||||
|
||||
Formatting & Style Requirements:
|
||||
|
||||
- Use Markdown headings exactly as in the template (do not demote/promote levels).
|
||||
- Wrap long rationale lines to keep readability (<100 chars ideally) but do not hard enforce with awkward breaks.
|
||||
- Keep a single blank line between sections.
|
||||
- Avoid trailing whitespace.
|
||||
|
||||
If the user supplies partial updates (e.g., only one principle revision), still perform validation and version decision steps.
|
||||
|
||||
If critical info missing (e.g., ratification date truly unknown), insert `TODO(<FIELD_NAME>): explanation` and include in the Sync Impact Report under deferred items.
|
||||
|
||||
Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file.
|
||||
|
||||
--- End Command ---
|
||||
|
||||
@@ -1,104 +1,97 @@
|
||||
# Implementation Plan: [FEATURE]
|
||||
# Plan: [FEATURE_NAME]
|
||||
|
||||
**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
|
||||
**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`
|
||||
**Created:** [YYYY-MM-DD]
|
||||
**Status:** [Draft | Active | Completed | Obsolete]
|
||||
**Owner:** [OWNER_NAME]
|
||||
|
||||
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
|
||||
## Overview
|
||||
|
||||
## Summary
|
||||
Brief description of what this plan aims to achieve and why it's important.
|
||||
|
||||
[Extract from feature spec: primary requirement + technical approach from research]
|
||||
## Objectives
|
||||
|
||||
## Technical Context
|
||||
- [ ] Primary objective 1
|
||||
- [ ] Primary objective 2
|
||||
- [ ] Primary objective 3
|
||||
|
||||
<!--
|
||||
ACTION REQUIRED: Replace the content in this section with the technical details
|
||||
for the project. The structure here is presented in advisory capacity to guide
|
||||
the iteration process.
|
||||
-->
|
||||
## Constitution Alignment Check
|
||||
|
||||
**Language/Version**: [e.g., Python 3.11, Swift 5.9, Rust 1.75 or NEEDS CLARIFICATION]
|
||||
**Primary Dependencies**: [e.g., FastAPI, UIKit, LLVM or NEEDS CLARIFICATION]
|
||||
**Storage**: [if applicable, e.g., PostgreSQL, CoreData, files or N/A]
|
||||
**Testing**: [e.g., pytest, XCTest, cargo test or NEEDS CLARIFICATION]
|
||||
**Target Platform**: [e.g., Linux server, iOS 15+, WASM or NEEDS CLARIFICATION]
|
||||
**Project Type**: [single/web/mobile - determines source structure]
|
||||
**Performance Goals**: [domain-specific, e.g., 1000 req/s, 10k lines/sec, 60 fps or NEEDS CLARIFICATION]
|
||||
**Constraints**: [domain-specific, e.g., <200ms p95, <100MB memory, offline-capable or NEEDS CLARIFICATION]
|
||||
**Scale/Scope**: [domain-specific, e.g., 10k users, 1M LOC, 50 screens or NEEDS CLARIFICATION]
|
||||
Before proceeding, verify alignment with constitutional principles:
|
||||
|
||||
## Constitution Check
|
||||
- **Code Quality & Maintainability:** How will this maintain/improve code quality?
|
||||
- [ ] Design follows single responsibility principle
|
||||
- [ ] Clear module boundaries defined
|
||||
- [ ] Dependencies justified and documented
|
||||
|
||||
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
|
||||
- **Testing Discipline:** What testing strategy will ensure correctness?
|
||||
- [ ] Unit test coverage plan (≥80%)
|
||||
- [ ] Integration test scenarios identified
|
||||
- [ ] Edge cases documented
|
||||
|
||||
[Gates determined based on constitution file]
|
||||
- **User Experience Consistency:** How does this impact users?
|
||||
- [ ] UI/API changes follow existing patterns
|
||||
- [ ] Error handling is user-friendly
|
||||
- [ ] Documentation plan complete
|
||||
|
||||
## Project Structure
|
||||
- **Performance & Efficiency:** What are the performance implications?
|
||||
- [ ] Performance budget established
|
||||
- [ ] Algorithmic complexity analyzed
|
||||
- [ ] Resource usage estimated
|
||||
|
||||
### Documentation (this feature)
|
||||
## Scope
|
||||
|
||||
```text
|
||||
specs/[###-feature]/
|
||||
├── plan.md # This file (/speckit.plan command output)
|
||||
├── research.md # Phase 0 output (/speckit.plan command)
|
||||
├── data-model.md # Phase 1 output (/speckit.plan command)
|
||||
├── quickstart.md # Phase 1 output (/speckit.plan command)
|
||||
├── contracts/ # Phase 1 output (/speckit.plan command)
|
||||
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
|
||||
```
|
||||
### In Scope
|
||||
- What will be built/changed
|
||||
- Explicit boundaries
|
||||
|
||||
### Source Code (repository root)
|
||||
<!--
|
||||
ACTION REQUIRED: Replace the placeholder tree below with the concrete layout
|
||||
for this feature. Delete unused options and expand the chosen structure with
|
||||
real paths (e.g., apps/admin, packages/something). The delivered plan must
|
||||
not include Option labels.
|
||||
-->
|
||||
### Out of Scope
|
||||
- What will NOT be addressed
|
||||
- Deferred items for future work
|
||||
|
||||
```text
|
||||
# [REMOVE IF UNUSED] Option 1: Single project (DEFAULT)
|
||||
src/
|
||||
├── models/
|
||||
├── services/
|
||||
├── cli/
|
||||
└── lib/
|
||||
## Technical Approach
|
||||
|
||||
tests/
|
||||
├── contract/
|
||||
├── integration/
|
||||
└── unit/
|
||||
High-level technical strategy and architectural decisions.
|
||||
|
||||
# [REMOVE IF UNUSED] Option 2: Web application (when "frontend" + "backend" detected)
|
||||
backend/
|
||||
├── src/
|
||||
│ ├── models/
|
||||
│ ├── services/
|
||||
│ └── api/
|
||||
└── tests/
|
||||
### Key Components
|
||||
1. Component A: Purpose and responsibilities
|
||||
2. Component B: Purpose and responsibilities
|
||||
3. Component C: Purpose and responsibilities
|
||||
|
||||
frontend/
|
||||
├── src/
|
||||
│ ├── components/
|
||||
│ ├── pages/
|
||||
│ └── services/
|
||||
└── tests/
|
||||
### Dependencies
|
||||
- Internal dependencies (other modules/services)
|
||||
- External dependencies (libraries, APIs, services)
|
||||
|
||||
# [REMOVE IF UNUSED] Option 3: Mobile + API (when "iOS/Android" detected)
|
||||
api/
|
||||
└── [same as backend above]
|
||||
### Risks & Mitigations
|
||||
| Risk | Impact | Probability | Mitigation Strategy |
|
||||
|------|--------|-------------|---------------------|
|
||||
| Risk 1 | High/Med/Low | High/Med/Low | How we'll address it |
|
||||
|
||||
ios/ or android/
|
||||
└── [platform-specific structure: feature modules, UI flows, platform tests]
|
||||
```
|
||||
## Implementation Phases
|
||||
|
||||
**Structure Decision**: [Document the selected structure and reference the real
|
||||
directories captured above]
|
||||
### Phase 1: [Name] (Est: X days)
|
||||
- Milestone 1
|
||||
- Milestone 2
|
||||
|
||||
## Complexity Tracking
|
||||
### Phase 2: [Name] (Est: X days)
|
||||
- Milestone 3
|
||||
- Milestone 4
|
||||
|
||||
> **Fill ONLY if Constitution Check has violations that must be justified**
|
||||
## Success Criteria
|
||||
|
||||
| Violation | Why Needed | Simpler Alternative Rejected Because |
|
||||
|-----------|------------|-------------------------------------|
|
||||
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
|
||||
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |
|
||||
Clear, measurable criteria for completion:
|
||||
- [ ] All tests passing with ≥80% coverage
|
||||
- [ ] Performance benchmarks met
|
||||
- [ ] Documentation complete
|
||||
- [ ] Code review approved
|
||||
- [ ] Production deployment successful
|
||||
|
||||
## Open Questions
|
||||
|
||||
- [ ] Question 1 that needs resolution
|
||||
- [ ] Question 2 that needs research
|
||||
|
||||
## References
|
||||
|
||||
- Link to specs
|
||||
- Related plans
|
||||
- External documentation
|
||||
|
||||
@@ -1,115 +1,181 @@
|
||||
# Feature Specification: [FEATURE NAME]
|
||||
# Specification: [FEATURE_NAME]
|
||||
|
||||
**Feature Branch**: `[###-feature-name]`
|
||||
**Created**: [DATE]
|
||||
**Status**: Draft
|
||||
**Input**: User description: "$ARGUMENTS"
|
||||
**Version:** [X.Y.Z]
|
||||
**Created:** [YYYY-MM-DD]
|
||||
**Last Updated:** [YYYY-MM-DD]
|
||||
**Status:** [Draft | Review | Approved | Implemented]
|
||||
**Owner:** [OWNER_NAME]
|
||||
|
||||
## User Scenarios & Testing *(mandatory)*
|
||||
## Purpose
|
||||
|
||||
<!--
|
||||
IMPORTANT: User stories should be PRIORITIZED as user journeys ordered by importance.
|
||||
Each user story/journey must be INDEPENDENTLY TESTABLE - meaning if you implement just ONE of them,
|
||||
you should still have a viable MVP (Minimum Viable Product) that delivers value.
|
||||
Clear statement of what this specification defines and its business/technical value.
|
||||
|
||||
Assign priorities (P1, P2, P3, etc.) to each story, where P1 is the most critical.
|
||||
Think of each story as a standalone slice of functionality that can be:
|
||||
- Developed independently
|
||||
- Tested independently
|
||||
- Deployed independently
|
||||
- Demonstrated to users independently
|
||||
-->
|
||||
|
||||
### User Story 1 - [Brief Title] (Priority: P1)
|
||||
|
||||
[Describe this user journey in plain language]
|
||||
|
||||
**Why this priority**: [Explain the value and why it has this priority level]
|
||||
|
||||
**Independent Test**: [Describe how this can be tested independently - e.g., "Can be fully tested by [specific action] and delivers [specific value]"]
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** [initial state], **When** [action], **Then** [expected outcome]
|
||||
2. **Given** [initial state], **When** [action], **Then** [expected outcome]
|
||||
|
||||
---
|
||||
|
||||
### User Story 2 - [Brief Title] (Priority: P2)
|
||||
|
||||
[Describe this user journey in plain language]
|
||||
|
||||
**Why this priority**: [Explain the value and why it has this priority level]
|
||||
|
||||
**Independent Test**: [Describe how this can be tested independently]
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** [initial state], **When** [action], **Then** [expected outcome]
|
||||
|
||||
---
|
||||
|
||||
### User Story 3 - [Brief Title] (Priority: P3)
|
||||
|
||||
[Describe this user journey in plain language]
|
||||
|
||||
**Why this priority**: [Explain the value and why it has this priority level]
|
||||
|
||||
**Independent Test**: [Describe how this can be tested independently]
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** [initial state], **When** [action], **Then** [expected outcome]
|
||||
|
||||
---
|
||||
|
||||
[Add more user stories as needed, each with an assigned priority]
|
||||
|
||||
### Edge Cases
|
||||
|
||||
<!--
|
||||
ACTION REQUIRED: The content in this section represents placeholders.
|
||||
Fill them out with the right edge cases.
|
||||
-->
|
||||
|
||||
- What happens when [boundary condition]?
|
||||
- How does the system handle [error scenario]?
|
||||
|
||||
## Requirements *(mandatory)*
|
||||
|
||||
<!--
|
||||
ACTION REQUIRED: The content in this section represents placeholders.
|
||||
Fill them out with the right functional requirements.
|
||||
-->
|
||||
## Requirements
|
||||
|
||||
### Functional Requirements
|
||||
|
||||
- **FR-001**: System MUST [specific capability, e.g., "allow users to create accounts"]
|
||||
- **FR-002**: System MUST [specific capability, e.g., "validate email addresses"]
|
||||
- **FR-003**: Users MUST be able to [key interaction, e.g., "reset their password"]
|
||||
- **FR-004**: System MUST [data requirement, e.g., "persist user preferences"]
|
||||
- **FR-005**: System MUST [behavior, e.g., "log all security events"]
|
||||
#### FR1: [Requirement Name]
|
||||
**Priority:** [Critical | High | Medium | Low]
|
||||
**Description:** Detailed description of the requirement.
|
||||
|
||||
*Example of marking unclear requirements:*
|
||||
**Acceptance Criteria:**
|
||||
- [ ] Criterion 1 (testable condition)
|
||||
- [ ] Criterion 2 (testable condition)
|
||||
- [ ] Criterion 3 (testable condition)
|
||||
|
||||
- **FR-006**: System MUST authenticate users via [NEEDS CLARIFICATION: auth method not specified - email/password, SSO, OAuth?]
|
||||
- **FR-007**: System MUST retain user data for [NEEDS CLARIFICATION: retention period not specified]
|
||||
**Constitutional Alignment:**
|
||||
- Testing: [How this will be tested per Principle 2]
|
||||
- UX Impact: [User-facing implications per Principle 3]
|
||||
- Performance: [Performance considerations per Principle 4]
|
||||
|
||||
### Key Entities *(include if feature involves data)*
|
||||
#### FR2: [Requirement Name]
|
||||
[Repeat structure above]
|
||||
|
||||
- **[Entity 1]**: [What it represents, key attributes without implementation]
|
||||
- **[Entity 2]**: [What it represents, relationships to other entities]
|
||||
### Non-Functional Requirements
|
||||
|
||||
## Success Criteria *(mandatory)*
|
||||
#### NFR1: Performance
|
||||
Per Constitutional Principle 4:
|
||||
- Response time: [target, e.g., <200ms for p95]
|
||||
- Throughput: [target, e.g., >1000 req/s]
|
||||
- Resource limits: [memory/CPU bounds]
|
||||
- Scalability: [expected load ranges]
|
||||
|
||||
<!--
|
||||
ACTION REQUIRED: Define measurable success criteria.
|
||||
These must be technology-agnostic and measurable.
|
||||
-->
|
||||
#### NFR2: Quality
|
||||
Per Constitutional Principle 1:
|
||||
- Code coverage: ≥80% (Principle 2 requirement)
|
||||
- Linting: Zero errors/warnings
|
||||
- Type safety: Full type hints on public APIs
|
||||
- Documentation: All public APIs documented
|
||||
|
||||
### Measurable Outcomes
|
||||
#### NFR3: User Experience
|
||||
Per Constitutional Principle 3:
|
||||
- Accessibility: WCAG 2.1 AA compliance
|
||||
- Error handling: User-friendly messages
|
||||
- Consistency: Follows existing design patterns
|
||||
- Response feedback: <200ms or progress indicators
|
||||
|
||||
- **SC-001**: [Measurable metric, e.g., "Users can complete account creation in under 2 minutes"]
|
||||
- **SC-002**: [Measurable metric, e.g., "System handles 1000 concurrent users without degradation"]
|
||||
- **SC-003**: [User satisfaction metric, e.g., "90% of users successfully complete primary task on first attempt"]
|
||||
- **SC-004**: [Business metric, e.g., "Reduce support tickets related to [X] by 50%"]
|
||||
#### NFR4: Maintainability
|
||||
Per Constitutional Principle 1:
|
||||
- Complexity: Cyclomatic complexity <10 per function
|
||||
- Dependencies: Explicit versioning, security audit
|
||||
- Modularity: Clear separation of concerns
|
||||
|
||||
## Design
|
||||
|
||||
### Architecture Overview
|
||||
[Diagram or description of system components and their interactions]
|
||||
|
||||
### Data Models
|
||||
```python
|
||||
# Example data structures with type hints
|
||||
class ExampleModel:
|
||||
"""Clear docstring explaining purpose."""
|
||||
field1: str
|
||||
field2: int
|
||||
field3: Optional[List[str]]
|
||||
```
|
||||
|
||||
### API/Interface Specifications
|
||||
|
||||
#### Endpoint/Method: [Name]
|
||||
```python
|
||||
def example_function(param1: str, param2: int) -> ResultType:
|
||||
"""
|
||||
Clear description of what this does.
|
||||
|
||||
Args:
|
||||
param1: Description of parameter
|
||||
param2: Description of parameter
|
||||
|
||||
Returns:
|
||||
Description of return value
|
||||
|
||||
Raises:
|
||||
ValueError: When validation fails
|
||||
"""
|
||||
pass
|
||||
```
|
||||
|
||||
**Error Handling:**
|
||||
- Error case 1: Response/behavior
|
||||
- Error case 2: Response/behavior
|
||||
|
||||
### Testing Strategy
|
||||
|
||||
#### Unit Tests
|
||||
- Component A: [Test scenarios]
|
||||
- Component B: [Test scenarios]
|
||||
- Edge cases: [List critical edge cases]
|
||||
|
||||
#### Integration Tests
|
||||
- Integration point 1: [Test scenario]
|
||||
- Integration point 2: [Test scenario]
|
||||
|
||||
#### Performance Tests
|
||||
- Benchmark 1: [Target metric]
|
||||
- Load test: [Expected traffic pattern]
|
||||
|
||||
## Implementation Considerations
|
||||
|
||||
### Performance Analysis
|
||||
- Algorithmic complexity: [Big-O analysis]
|
||||
- Database queries: [Query plans, indexes needed]
|
||||
- Caching strategy: [What, when, invalidation]
|
||||
- Bottleneck prevention: [Known risks and mitigations]
|
||||
|
||||
### Security Considerations
|
||||
- Authentication/Authorization requirements
|
||||
- Input validation requirements
|
||||
- Data protection measures
|
||||
|
||||
### Migration Path
|
||||
If this changes existing functionality:
|
||||
- Backward compatibility strategy
|
||||
- User migration steps
|
||||
- Rollback plan
|
||||
|
||||
## Dependencies
|
||||
|
||||
### Internal Dependencies
|
||||
- Module/Service A: [Why needed]
|
||||
- Module/Service B: [Why needed]
|
||||
|
||||
### External Dependencies
|
||||
```python
|
||||
# New dependencies to add (with justification)
|
||||
package-name==X.Y.Z # Why: specific reason for this dependency
|
||||
```
|
||||
|
||||
## Rollout Plan
|
||||
|
||||
1. **Development:** [Timeline and milestones]
|
||||
2. **Testing:** [QA approach and environments]
|
||||
3. **Staging:** [Validation steps]
|
||||
4. **Production:** [Deployment strategy - canary/blue-green/etc]
|
||||
5. **Monitoring:** [Key metrics to watch]
|
||||
|
||||
## Success Metrics
|
||||
|
||||
Post-deployment validation:
|
||||
- [ ] All acceptance criteria met
|
||||
- [ ] Performance benchmarks achieved
|
||||
- [ ] Zero critical bugs in first week
|
||||
- [ ] User feedback collected and positive
|
||||
- [ ] Test coverage ≥80% maintained
|
||||
|
||||
## Open Issues
|
||||
|
||||
- [ ] Issue 1 requiring resolution
|
||||
- [ ] Issue 2 needing decision
|
||||
|
||||
## Appendix
|
||||
|
||||
### References
|
||||
- Related specifications
|
||||
- External documentation
|
||||
- Research materials
|
||||
|
||||
### Change Log
|
||||
| Version | Date | Author | Changes |
|
||||
|---------|------|--------|---------|
|
||||
| 1.0.0 | YYYY-MM-DD | Name | Initial specification |
|
||||
|
||||
@@ -1,251 +1,148 @@
|
||||
---
|
||||
# Tasks: [FEATURE/AREA_NAME]
|
||||
|
||||
description: "Task list template for feature implementation"
|
||||
---
|
||||
**Created:** [YYYY-MM-DD]
|
||||
**Last Updated:** [YYYY-MM-DD]
|
||||
**Sprint/Milestone:** [IDENTIFIER]
|
||||
|
||||
# Tasks: [FEATURE NAME]
|
||||
## Overview
|
||||
|
||||
Brief context for this task list and its relationship to plans/specs.
|
||||
|
||||
**Input**: Design documents from `/specs/[###-feature-name]/`
|
||||
**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/
|
||||
## Task Categories
|
||||
|
||||
**Tests**: The examples below include test tasks. Tests are OPTIONAL - only include them if explicitly requested in the feature specification.
|
||||
Tasks are organized by constitutional principle to ensure balanced development:
|
||||
|
||||
**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story.
|
||||
### 🏗️ Implementation Tasks (Principle 1: Code Quality)
|
||||
|
||||
- [ ] **[TASK-001]** Task title
|
||||
- **Description:** What needs to be done
|
||||
- **Acceptance:** How to verify completion
|
||||
- **Estimate:** [S/M/L/XL or hours]
|
||||
- **Dependencies:** [Other task IDs]
|
||||
- **Quality checklist:**
|
||||
- [ ] Follows style guide (linter passes)
|
||||
- [ ] Type hints added
|
||||
- [ ] No code duplication
|
||||
- [ ] Comments explain "why" not "what"
|
||||
|
||||
- [ ] **[TASK-002]** Next task...
|
||||
|
||||
### 🧪 Testing Tasks (Principle 2: Testing Discipline)
|
||||
|
||||
## Format: `[ID] [P?] [Story] Description`
|
||||
- [ ] **[TEST-001]** Write unit tests for [Component]
|
||||
- **Coverage target:** ≥80% for new code
|
||||
- **Test scenarios:**
|
||||
- [ ] Happy path
|
||||
- [ ] Edge case 1
|
||||
- [ ] Edge case 2
|
||||
- [ ] Error handling
|
||||
- **Estimate:** [S/M/L/XL]
|
||||
|
||||
- **[P]**: Can run in parallel (different files, no dependencies)
|
||||
- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3)
|
||||
- Include exact file paths in descriptions
|
||||
- [ ] **[TEST-002]** Integration tests for [Feature]
|
||||
- **Scope:** [Component interactions to validate]
|
||||
- **Performance target:** <10s execution time
|
||||
|
||||
## Path Conventions
|
||||
- [ ] **[TEST-003]** Regression test for [Bug #X]
|
||||
- **Bug reference:** [Link to issue]
|
||||
- **Reproduction steps:** [Documented]
|
||||
|
||||
- **Single project**: `src/`, `tests/` at repository root
|
||||
- **Web app**: `backend/src/`, `frontend/src/`
|
||||
- **Mobile**: `api/src/`, `ios/src/` or `android/src/`
|
||||
- Paths shown below assume single project - adjust based on plan.md structure
|
||||
### 👤 User Experience Tasks (Principle 3: UX Consistency)
|
||||
|
||||
<!--
|
||||
============================================================================
|
||||
IMPORTANT: The tasks below are SAMPLE TASKS for illustration purposes only.
|
||||
- [ ] **[UX-001]** Design/implement [UI Component]
|
||||
- **Design system alignment:** [Pattern/component to follow]
|
||||
- **Accessibility checklist:**
|
||||
- [ ] Keyboard navigable
|
||||
- [ ] Screen reader compatible
|
||||
- [ ] Color contrast WCAG AA
|
||||
- [ ] Focus indicators visible
|
||||
- **Estimate:** [S/M/L/XL]
|
||||
|
||||
The /speckit.tasks command MUST replace these with actual tasks based on:
|
||||
- User stories from spec.md (with their priorities P1, P2, P3...)
|
||||
- Feature requirements from plan.md
|
||||
- Entities from data-model.md
|
||||
- Endpoints from contracts/
|
||||
- [ ] **[UX-002]** Error message improvement for [Feature]
|
||||
- **Current message:** [What users see now]
|
||||
- **Improved message:** [Clear, actionable alternative]
|
||||
- **Context provided:** [Where, why, what to do]
|
||||
|
||||
Tasks MUST be organized by user story so each story can be:
|
||||
- Implemented independently
|
||||
- Tested independently
|
||||
- Delivered as an MVP increment
|
||||
- [ ] **[UX-003]** User documentation for [Feature]
|
||||
- **Target audience:** [End users/API consumers/admins]
|
||||
- **Format:** [README/Wiki/API docs/Tutorial]
|
||||
|
||||
DO NOT keep these sample tasks in the generated tasks.md file.
|
||||
============================================================================
|
||||
-->
|
||||
### ⚡ Performance Tasks (Principle 4: Performance & Efficiency)
|
||||
|
||||
## Phase 1: Setup (Shared Infrastructure)
|
||||
- [ ] **[PERF-001]** Optimize [Operation/Query]
|
||||
- **Current performance:** [Baseline metric]
|
||||
- **Target performance:** [Goal metric]
|
||||
- **Approach:** [Algorithm change/caching/indexing/etc]
|
||||
- **Estimate:** [S/M/L/XL]
|
||||
|
||||
**Purpose**: Project initialization and basic structure
|
||||
- [ ] **[PERF-002]** Add performance benchmark for [Feature]
|
||||
- **Metric:** [Response time/throughput/memory]
|
||||
- **Budget:** [Threshold that triggers alert]
|
||||
- **CI integration:** [How it blocks bad merges]
|
||||
|
||||
- [ ] T001 Create project structure per implementation plan
|
||||
- [ ] T002 Initialize [language] project with [framework] dependencies
|
||||
- [ ] T003 [P] Configure linting and formatting tools
|
||||
- [ ] **[PERF-003]** Profile and fix [Bottleneck]
|
||||
- **Profiling tool:** [Tool to use]
|
||||
- **Suspected issue:** [Hypothesis]
|
||||
- **Verification:** [How to confirm fix]
|
||||
|
||||
---
|
||||
### 🔧 Infrastructure/DevOps Tasks
|
||||
|
||||
## Phase 2: Foundational (Blocking Prerequisites)
|
||||
- [ ] **[INFRA-001]** Setup [Tool/Service]
|
||||
- **Purpose:** [Why this is needed]
|
||||
- **Configuration:** [Key settings]
|
||||
- **Documentation:** [Where to document setup]
|
||||
|
||||
**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented
|
||||
- [ ] **[INFRA-002]** CI/CD pipeline enhancement
|
||||
- **Addition:** [What check/stage to add]
|
||||
- **Constitutional alignment:** [Which principle this enforces]
|
||||
|
||||
**⚠️ CRITICAL**: No user story work can begin until this phase is complete
|
||||
### 📋 Technical Debt Tasks
|
||||
|
||||
Examples of foundational tasks (adjust based on your project):
|
||||
- [ ] **[DEBT-001]** Refactor [Component]
|
||||
- **Current problem:** [What makes this debt]
|
||||
- **Proposed solution:** [Refactoring approach]
|
||||
- **Impact:** [What improves after fix]
|
||||
- **Estimate:** [S/M/L/XL]
|
||||
|
||||
- [ ] T004 Setup database schema and migrations framework
|
||||
- [ ] T005 [P] Implement authentication/authorization framework
|
||||
- [ ] T006 [P] Setup API routing and middleware structure
|
||||
- [ ] T007 Create base models/entities that all stories depend on
|
||||
- [ ] T008 Configure error handling and logging infrastructure
|
||||
- [ ] T009 Setup environment configuration management
|
||||
- [ ] **[DEBT-002]** Update dependencies
|
||||
- **Packages:** [List outdated packages]
|
||||
- **Risk assessment:** [Breaking changes?]
|
||||
- **Testing plan:** [How to verify upgrade]
|
||||
|
||||
**Checkpoint**: Foundation ready - user story implementation can now begin in parallel
|
||||
## Task Estimation Guide
|
||||
|
||||
---
|
||||
- **S (Small):** <2 hours, single file, no dependencies
|
||||
- **M (Medium):** 2-4 hours, multiple files, minor dependencies
|
||||
- **L (Large):** 4-8 hours, multiple components, significant testing
|
||||
- **XL (X-Large):** >8 hours, consider breaking down further
|
||||
|
||||
## Completion Checklist
|
||||
|
||||
## Phase 3: User Story 1 - [Title] (Priority: P1) 🎯 MVP
|
||||
Before closing any task, verify:
|
||||
- [ ] Code changes committed with clear message
|
||||
- [ ] Tests written and passing (≥80% coverage for new code)
|
||||
- [ ] Linter/type checker passing
|
||||
- [ ] Documentation updated
|
||||
- [ ] Code review completed
|
||||
- [ ] Constitutional principles satisfied
|
||||
- [ ] Deployed to staging/production
|
||||
|
||||
**Goal**: [Brief description of what this story delivers]
|
||||
## Blocked Tasks
|
||||
|
||||
**Independent Test**: [How to verify this story works on its own]
|
||||
Track tasks waiting on external dependencies:
|
||||
|
||||
### Tests for User Story 1 (OPTIONAL - only if tests requested) ⚠️
|
||||
- **[TASK-XXX]** Task title
|
||||
- **Blocked by:** [Reason/dependency]
|
||||
- **Resolution needed:** [Action to unblock]
|
||||
- **Owner of blocker:** [Person/team]
|
||||
|
||||
> **NOTE: Write these tests FIRST, ensure they FAIL before implementation**
|
||||
## Completed Tasks
|
||||
|
||||
- [ ] T010 [P] [US1] Contract test for [endpoint] in tests/contract/test_[name].py
|
||||
- [ ] T011 [P] [US1] Integration test for [user journey] in tests/integration/test_[name].py
|
||||
Move completed tasks here with completion date:
|
||||
|
||||
### Implementation for User Story 1
|
||||
- ✅ **[TASK-000]** Example completed task (2025-11-01)
|
||||
|
||||
- [ ] T012 [P] [US1] Create [Entity1] model in src/models/[entity1].py
|
||||
- [ ] T013 [P] [US1] Create [Entity2] model in src/models/[entity2].py
|
||||
- [ ] T014 [US1] Implement [Service] in src/services/[service].py (depends on T012, T013)
|
||||
- [ ] T015 [US1] Implement [endpoint/feature] in src/[location]/[file].py
|
||||
- [ ] T016 [US1] Add validation and error handling
|
||||
- [ ] T017 [US1] Add logging for user story 1 operations
|
||||
## Notes & Decisions
|
||||
|
||||
**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently
|
||||
Document important decisions or context that affects multiple tasks:
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: User Story 2 - [Title] (Priority: P2)
|
||||
|
||||
**Goal**: [Brief description of what this story delivers]
|
||||
|
||||
**Independent Test**: [How to verify this story works on its own]
|
||||
|
||||
### Tests for User Story 2 (OPTIONAL - only if tests requested) ⚠️
|
||||
|
||||
- [ ] T018 [P] [US2] Contract test for [endpoint] in tests/contract/test_[name].py
|
||||
- [ ] T019 [P] [US2] Integration test for [user journey] in tests/integration/test_[name].py
|
||||
|
||||
### Implementation for User Story 2
|
||||
|
||||
- [ ] T020 [P] [US2] Create [Entity] model in src/models/[entity].py
|
||||
- [ ] T021 [US2] Implement [Service] in src/services/[service].py
|
||||
- [ ] T022 [US2] Implement [endpoint/feature] in src/[location]/[file].py
|
||||
- [ ] T023 [US2] Integrate with User Story 1 components (if needed)
|
||||
|
||||
**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: User Story 3 - [Title] (Priority: P3)
|
||||
|
||||
**Goal**: [Brief description of what this story delivers]
|
||||
|
||||
**Independent Test**: [How to verify this story works on its own]
|
||||
|
||||
### Tests for User Story 3 (OPTIONAL - only if tests requested) ⚠️
|
||||
|
||||
- [ ] T024 [P] [US3] Contract test for [endpoint] in tests/contract/test_[name].py
|
||||
- [ ] T025 [P] [US3] Integration test for [user journey] in tests/integration/test_[name].py
|
||||
|
||||
### Implementation for User Story 3
|
||||
|
||||
- [ ] T026 [P] [US3] Create [Entity] model in src/models/[entity].py
|
||||
- [ ] T027 [US3] Implement [Service] in src/services/[service].py
|
||||
- [ ] T028 [US3] Implement [endpoint/feature] in src/[location]/[file].py
|
||||
|
||||
**Checkpoint**: All user stories should now be independently functional
|
||||
|
||||
---
|
||||
|
||||
[Add more user story phases as needed, following the same pattern]
|
||||
|
||||
---
|
||||
|
||||
## Phase N: Polish & Cross-Cutting Concerns
|
||||
|
||||
**Purpose**: Improvements that affect multiple user stories
|
||||
|
||||
- [ ] TXXX [P] Documentation updates in docs/
|
||||
- [ ] TXXX Code cleanup and refactoring
|
||||
- [ ] TXXX Performance optimization across all stories
|
||||
- [ ] TXXX [P] Additional unit tests (if requested) in tests/unit/
|
||||
- [ ] TXXX Security hardening
|
||||
- [ ] TXXX Run quickstart.md validation
|
||||
|
||||
---
|
||||
|
||||
## Dependencies & Execution Order
|
||||
|
||||
### Phase Dependencies
|
||||
|
||||
- **Setup (Phase 1)**: No dependencies - can start immediately
|
||||
- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories
|
||||
- **User Stories (Phase 3+)**: All depend on Foundational phase completion
|
||||
- User stories can then proceed in parallel (if staffed)
|
||||
- Or sequentially in priority order (P1 → P2 → P3)
|
||||
- **Polish (Final Phase)**: Depends on all desired user stories being complete
|
||||
|
||||
### User Story Dependencies
|
||||
|
||||
- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
|
||||
- **User Story 2 (P2)**: Can start after Foundational (Phase 2) - May integrate with US1 but should be independently testable
|
||||
- **User Story 3 (P3)**: Can start after Foundational (Phase 2) - May integrate with US1/US2 but should be independently testable
|
||||
|
||||
### Within Each User Story
|
||||
|
||||
- Tests (if included) MUST be written and FAIL before implementation
|
||||
- Models before services
|
||||
- Services before endpoints
|
||||
- Core implementation before integration
|
||||
- Story complete before moving to next priority
|
||||
|
||||
### Parallel Opportunities
|
||||
|
||||
- All Setup tasks marked [P] can run in parallel
|
||||
- All Foundational tasks marked [P] can run in parallel (within Phase 2)
|
||||
- Once Foundational phase completes, all user stories can start in parallel (if team capacity allows)
|
||||
- All tests for a user story marked [P] can run in parallel
|
||||
- Models within a story marked [P] can run in parallel
|
||||
- Different user stories can be worked on in parallel by different team members
|
||||
|
||||
---
|
||||
|
||||
## Parallel Example: User Story 1
|
||||
|
||||
```bash
|
||||
# Launch all tests for User Story 1 together (if tests requested):
|
||||
Task: "Contract test for [endpoint] in tests/contract/test_[name].py"
|
||||
Task: "Integration test for [user journey] in tests/integration/test_[name].py"
|
||||
|
||||
# Launch all models for User Story 1 together:
|
||||
Task: "Create [Entity1] model in src/models/[entity1].py"
|
||||
Task: "Create [Entity2] model in src/models/[entity2].py"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation Strategy
|
||||
|
||||
### MVP First (User Story 1 Only)
|
||||
|
||||
1. Complete Phase 1: Setup
|
||||
2. Complete Phase 2: Foundational (CRITICAL - blocks all stories)
|
||||
3. Complete Phase 3: User Story 1
|
||||
4. **STOP and VALIDATE**: Test User Story 1 independently
|
||||
5. Deploy/demo if ready
|
||||
|
||||
### Incremental Delivery
|
||||
|
||||
1. Complete Setup + Foundational → Foundation ready
|
||||
2. Add User Story 1 → Test independently → Deploy/Demo (MVP!)
|
||||
3. Add User Story 2 → Test independently → Deploy/Demo
|
||||
4. Add User Story 3 → Test independently → Deploy/Demo
|
||||
5. Each story adds value without breaking previous stories
|
||||
|
||||
### Parallel Team Strategy
|
||||
|
||||
With multiple developers:
|
||||
|
||||
1. Team completes Setup + Foundational together
|
||||
2. Once Foundational is done:
|
||||
- Developer A: User Story 1
|
||||
- Developer B: User Story 2
|
||||
- Developer C: User Story 3
|
||||
3. Stories complete and integrate independently
|
||||
|
||||
---
|
||||
|
||||
## Notes
|
||||
|
||||
- [P] tasks = different files, no dependencies
|
||||
- [Story] label maps task to specific user story for traceability
|
||||
- Each user story should be independently completable and testable
|
||||
- Verify tests fail before implementing
|
||||
- Commit after each task or logical group
|
||||
- Stop at any checkpoint to validate story independently
|
||||
- Avoid: vague tasks, same file conflicts, cross-story dependencies that break independence
|
||||
- **[2025-11-02]** Decision about [topic]: [What was decided and why]
|
||||
|
||||
248
README.md
Normal file
248
README.md
Normal file
@@ -0,0 +1,248 @@
|
||||
# webref
|
||||
|
||||
A Python project for web reference management, built with quality and maintainability as core values.
|
||||
|
||||
## Project Constitution
|
||||
|
||||
This project follows a formal constitution that establishes binding principles for all development work. The constitution ensures consistent quality, testing discipline, user experience, and performance across the codebase.
|
||||
|
||||
**Constitutional Principles:**
|
||||
1. **Code Quality & Maintainability** - Clear, consistent, maintainable code with proper typing and documentation
|
||||
2. **Testing Discipline** - ≥80% coverage, automated testing for all functionality
|
||||
3. **User Experience Consistency** - Intuitive, accessible, consistent interfaces
|
||||
4. **Performance & Efficiency** - Performance-first design with bounded resources
|
||||
|
||||
📖 **Full constitution:** [`.specify/memory/constitution.md`](.specify/memory/constitution.md)
|
||||
|
||||
## Documentation
|
||||
|
||||
- 📚 **[Getting Started Guide](docs/getting-started.md)** - Complete setup walkthrough
|
||||
- 🔧 **[Nix Services](docs/development/nix-services.md)** - Service management
|
||||
- 📋 **[Specification](specs/001-reference-board-viewer/spec.md)** - Requirements & design
|
||||
- 📊 **[Milestones](docs/milestones/)** - Phase completion reports
|
||||
|
||||
## Development Environment
|
||||
|
||||
This project uses Nix flakes for reproducible development environments:
|
||||
|
||||
```bash
|
||||
# Enter development shell (from flake.nix)
|
||||
nix develop
|
||||
|
||||
# Or use direnv for automatic activation
|
||||
direnv allow # .envrc already configured
|
||||
```
|
||||
|
||||
**Included tools:**
|
||||
- Python 3.13 with all backend dependencies (FastAPI, SQLAlchemy, pytest, psycopg2, etc.)
|
||||
- Node.js + npm for frontend development
|
||||
- PostgreSQL client tools
|
||||
- MinIO client
|
||||
- Ruff (Python linter/formatter)
|
||||
- All project dependencies from flake.nix
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# 1. Enter Nix development environment
|
||||
nix develop
|
||||
|
||||
# 2. Start development services (PostgreSQL + MinIO)
|
||||
./scripts/dev-services.sh start
|
||||
|
||||
# 3. Setup backend (first time only)
|
||||
cd backend
|
||||
alembic upgrade head
|
||||
cd ..
|
||||
|
||||
# 4. Start backend (Terminal 1)
|
||||
cd backend
|
||||
uvicorn app.main:app --reload
|
||||
|
||||
# 5. Start frontend (Terminal 2)
|
||||
cd frontend
|
||||
npm install # first time only
|
||||
npm run dev
|
||||
|
||||
# 6. Test authentication (Terminal 3)
|
||||
./scripts/test-auth.sh
|
||||
```
|
||||
|
||||
**Access:**
|
||||
- Frontend: http://localhost:5173
|
||||
- Backend API Docs: http://localhost:8000/docs
|
||||
- MinIO Console: http://localhost:9001
|
||||
- PostgreSQL: `psql -h localhost -U webref webref`
|
||||
|
||||
## Code Quality & Linting
|
||||
|
||||
### Unified Linting (All Languages)
|
||||
|
||||
```bash
|
||||
# Check all code (Python + TypeScript/Svelte)
|
||||
./scripts/lint.sh
|
||||
# OR using nix:
|
||||
nix run .#lint
|
||||
|
||||
# Auto-fix all issues
|
||||
nix run .#lint-fix
|
||||
```
|
||||
|
||||
### Git Hooks (Automatic)
|
||||
|
||||
Install git hooks to run linting automatically:
|
||||
|
||||
```bash
|
||||
./scripts/install-hooks.sh
|
||||
```
|
||||
|
||||
This installs:
|
||||
- **pre-commit**: Runs linting before each commit
|
||||
- **pre-push**: Runs tests before push (optional)
|
||||
|
||||
To skip hooks when committing:
|
||||
```bash
|
||||
git commit --no-verify
|
||||
```
|
||||
|
||||
### Manual Linting
|
||||
|
||||
**Backend (Python):**
|
||||
```bash
|
||||
cd backend
|
||||
ruff check app/ # Check for issues
|
||||
ruff check --fix app/ # Auto-fix issues
|
||||
ruff format app/ # Format code
|
||||
```
|
||||
|
||||
**Frontend (TypeScript/Svelte):**
|
||||
```bash
|
||||
cd frontend
|
||||
npm run lint # ESLint check
|
||||
npm run check # TypeScript check
|
||||
npx prettier --check src/ # Prettier check
|
||||
npx prettier --write src/ # Auto-format
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
webref/
|
||||
├── .specify/ # Project governance and templates
|
||||
│ ├── memory/
|
||||
│ │ └── constitution.md # Project constitution
|
||||
│ └── templates/
|
||||
│ ├── plan-template.md # Planning document template
|
||||
│ ├── spec-template.md # Specification template
|
||||
│ ├── tasks-template.md # Task tracking template
|
||||
│ └── commands/
|
||||
│ └── constitution.md # Constitution amendment workflow
|
||||
├── backend/ # FastAPI backend application
|
||||
│ ├── app/ # Application code
|
||||
│ ├── tests/ # pytest test suite
|
||||
│ └── pyproject.toml # Python dependencies
|
||||
├── frontend/ # Svelte + Konva.js frontend
|
||||
│ ├── src/ # Application code
|
||||
│ ├── tests/ # Vitest test suite
|
||||
│ └── package.json # Node dependencies
|
||||
├── nixos/ # NixOS configuration and tests
|
||||
│ ├── tests.nix # NixOS VM integration tests
|
||||
│ └── gitea-runner.nix # Gitea Actions runner config
|
||||
├── flake.nix # Nix flake (dependencies & dev shell)
|
||||
├── .envrc # direnv configuration
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Using the Specification System
|
||||
|
||||
### Planning a Feature
|
||||
|
||||
1. Copy `.specify/templates/plan-template.md` to `.specify/plans/[feature-name].md`
|
||||
2. Fill in objectives, scope, and technical approach
|
||||
3. Complete the Constitution Alignment Check to verify adherence to principles
|
||||
4. Review with team before proceeding to specification
|
||||
|
||||
### Writing a Specification
|
||||
|
||||
1. Copy `.specify/templates/spec-template.md` to `.specify/specs/[feature-name].md`
|
||||
2. Define functional and non-functional requirements
|
||||
3. Each requirement must address constitutional alignment (testing, UX, performance)
|
||||
4. Include clear acceptance criteria for validation
|
||||
|
||||
### Managing Tasks
|
||||
|
||||
1. Copy `.specify/templates/tasks-template.md` to `.specify/tasks/[sprint-name].md`
|
||||
2. Organize tasks by constitutional principle category:
|
||||
- 🏗️ Implementation (Code Quality)
|
||||
- 🧪 Testing (Testing Discipline)
|
||||
- 👤 User Experience (UX Consistency)
|
||||
- ⚡ Performance (Performance & Efficiency)
|
||||
3. Complete the checklist before closing any task
|
||||
|
||||
### Amending the Constitution
|
||||
|
||||
1. Create a pull request to `.specify/memory/constitution.md`
|
||||
2. Include rationale and impact analysis
|
||||
3. Update version number (MAJOR.MINOR.PATCH)
|
||||
4. Update all dependent templates
|
||||
5. Prepend Sync Impact Report
|
||||
|
||||
## Code Quality Standards
|
||||
|
||||
All code must meet these requirements before merge:
|
||||
|
||||
- ✅ Linter passing (no errors/warnings)
|
||||
- ✅ Type hints on all public APIs
|
||||
- ✅ Tests passing with ≥80% coverage
|
||||
- ✅ Code review approved
|
||||
- ✅ Constitution principles verified
|
||||
|
||||
## Testing
|
||||
|
||||
### Unit Tests
|
||||
|
||||
```bash
|
||||
# Backend tests
|
||||
cd backend && pytest --cov=app --cov-report=html
|
||||
|
||||
# Frontend tests
|
||||
cd frontend && npm test
|
||||
|
||||
# Coverage must be ≥80% per Constitutional Principle 2
|
||||
```
|
||||
|
||||
### NixOS VM Integration Tests
|
||||
|
||||
```bash
|
||||
# Run all integration tests in isolated VMs
|
||||
nix flake check
|
||||
|
||||
# Run specific test
|
||||
nix build .#checks.x86_64-linux.backend-integration
|
||||
nix build .#checks.x86_64-linux.full-stack
|
||||
nix build .#checks.x86_64-linux.performance
|
||||
nix build .#checks.x86_64-linux.security
|
||||
|
||||
# Interactive debugging
|
||||
nix build .#checks.x86_64-linux.backend-integration.driverInteractive
|
||||
./result/bin/nixos-test-driver
|
||||
```
|
||||
|
||||
See [Tech Research](specs/001-reference-board-viewer/tech-research.md) for CI/testing architecture details.
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Read the [constitution](.specify/memory/constitution.md)
|
||||
2. Follow the planning → specification → implementation flow
|
||||
3. Ensure all code meets constitutional principles
|
||||
4. Write tests first (TDD encouraged)
|
||||
5. Request code review
|
||||
|
||||
## License
|
||||
|
||||
[License information to be added]
|
||||
|
||||
## Contact
|
||||
|
||||
[Contact information to be added]
|
||||
|
||||
115
backend/alembic.ini
Normal file
115
backend/alembic.ini
Normal file
@@ -0,0 +1,115 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d%%(second).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python-dateutil library that can be
|
||||
# installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# format using "ruff" - use the exec runner, execute a binary
|
||||
hooks = ruff
|
||||
ruff.type = exec
|
||||
ruff.executable = ruff
|
||||
ruff.options = format REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
|
||||
91
backend/alembic/env.py
Normal file
91
backend/alembic/env.py
Normal file
@@ -0,0 +1,91 @@
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
from pathlib import Path
|
||||
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# Add the backend/ directory (parent of alembic/) to sys.path so the `app`
# package resolves regardless of the CWD alembic is invoked from.
# NOTE: this must run before the `app.*` imports below.
sys.path.insert(0, str(Path(__file__).parent.parent))

# Import all models here for autogenerate to detect them: the star import
# executes every model module so its tables register on Base.metadata.
from app.database.base import Base  # noqa
from app.database.models import *  # noqa

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# Get database URL from environment or config: a DATABASE_URL env var, when
# present, overrides the placeholder sqlalchemy.url shipped in alembic.ini.
database_url = os.getenv("DATABASE_URL")
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the migration context with only a database URL — no Engine
    is created, so no DBAPI needs to be installed.  context.execute() calls
    emit the generated SQL to the script output instead of a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,  # render bound parameters inline in the SQL script
        dialect_opts={"paramstyle": "named"},
        compare_type=True,  # have autogenerate detect column type changes
        compare_server_default=True,  # ...and server-default changes
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ini-file section and applies the migrations
    over a real database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot use; connection pooling is pointless here
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,  # have autogenerate detect column type changes
            compare_server_default=True,  # ...and server-default changes
        )

        with context.begin_transaction():
            context.run_migrations()
||||
|
||||
|
||||
# Entry point: alembic sets offline mode when invoked with --sql (emit a SQL
# script); otherwise migrate against a live database connection.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
|
||||
|
||||
27
backend/alembic/script.py.mako
Normal file
27
backend/alembic/script.py.mako
Normal file
@@ -0,0 +1,27 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
|
||||
180
backend/alembic/versions/001_initial_schema.py
Normal file
180
backend/alembic/versions/001_initial_schema.py
Normal file
@@ -0,0 +1,180 @@
|
||||
"""001_initial_schema
|
||||
|
||||
Revision ID: 001_initial_schema
|
||||
Revises:
|
||||
Create Date: 2025-11-02
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
revision = '001_initial_schema'
down_revision = None  # first migration in the chain — nothing to revert to
branch_labels = None
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial schema for the reference-board application.

    Creates, in dependency order: users, boards, images, groups,
    board_images, share_links, comments.  Also installs the uuid-ossp
    extension (for the uuid_generate_v4() primary-key defaults), GIN
    indexes on the JSONB columns, and a shared updated_at trigger on the
    tables that carry an updated_at column.
    """
    # Enable UUID extension — provides uuid_generate_v4() used by id defaults
    op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')

    # Create users table
    op.create_table(
        'users',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('email', sa.String(255), nullable=False, unique=True),
        sa.Column('password_hash', sa.String(255), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
        # Emails are stored normalized; callers must lowercase before insert
        sa.CheckConstraint('email = LOWER(email)', name='check_email_lowercase')
    )
    op.create_index('idx_users_created_at', 'users', ['created_at'])
    op.create_index('idx_users_email', 'users', ['email'], unique=True)

    # Create boards table
    op.create_table(
        'boards',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
        sa.Column('title', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('viewport_state', postgresql.JSONB(), nullable=False, server_default=sa.text("'{\"x\": 0, \"y\": 0, \"zoom\": 1.0, \"rotation\": 0}'::jsonb")),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default=sa.text('FALSE')),
        sa.CheckConstraint('LENGTH(title) > 0', name='check_title_not_empty')
    )
    op.create_index('idx_boards_user_created', 'boards', ['user_id', 'created_at'])
    op.create_index('idx_boards_updated', 'boards', ['updated_at'])
    # GIN indexes are created via raw SQL; op.create_index has no portable USING clause
    op.execute('CREATE INDEX idx_boards_viewport ON boards USING GIN (viewport_state)')

    # Create images table
    op.create_table(
        'images',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
        sa.Column('filename', sa.String(255), nullable=False),
        sa.Column('storage_path', sa.String(512), nullable=False),
        sa.Column('file_size', sa.BigInteger(), nullable=False),
        sa.Column('mime_type', sa.String(100), nullable=False),
        sa.Column('width', sa.Integer(), nullable=False),
        sa.Column('height', sa.Integer(), nullable=False),
        sa.Column('image_metadata', postgresql.JSONB(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('reference_count', sa.Integer(), nullable=False, server_default=sa.text('0')),
        # 52428800 bytes = 50 MiB upload ceiling
        sa.CheckConstraint('file_size > 0 AND file_size <= 52428800', name='check_file_size'),
        sa.CheckConstraint('width > 0 AND width <= 10000', name='check_width'),
        sa.CheckConstraint('height > 0 AND height <= 10000', name='check_height')
    )
    op.create_index('idx_images_user_created', 'images', ['user_id', 'created_at'])
    op.create_index('idx_images_filename', 'images', ['filename'])
    op.execute('CREATE INDEX idx_images_metadata ON images USING GIN (image_metadata)')

    # Create groups table
    op.create_table(
        'groups',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('board_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('boards.id', ondelete='CASCADE'), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('color', sa.String(7), nullable=False),
        sa.Column('annotation', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.CheckConstraint('LENGTH(name) > 0', name='check_name_not_empty'),
        # color must be a "#RRGGBB" hex string
        sa.CheckConstraint("color ~ '^#[0-9A-Fa-f]{6}$'", name='check_color_hex')
    )
    op.create_index('idx_groups_board_created', 'groups', ['board_id', 'created_at'])

    # Create board_images table (join table: one image's placement on one board)
    op.create_table(
        'board_images',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('board_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('boards.id', ondelete='CASCADE'), nullable=False),
        sa.Column('image_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('images.id', ondelete='CASCADE'), nullable=False),
        sa.Column('position', postgresql.JSONB(), nullable=False),
        sa.Column('transformations', postgresql.JSONB(), nullable=False, server_default=sa.text("'{\"scale\": 1.0, \"rotation\": 0, \"opacity\": 1.0, \"flipped_h\": false, \"flipped_v\": false, \"greyscale\": false}'::jsonb")),
        sa.Column('z_order', sa.Integer(), nullable=False, server_default=sa.text('0')),
        # SET NULL: deleting a group detaches its images rather than deleting them
        sa.Column('group_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('groups.id', ondelete='SET NULL'), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()'))
    )
    op.create_unique_constraint('uq_board_image', 'board_images', ['board_id', 'image_id'])
    op.create_index('idx_board_images_board_z', 'board_images', ['board_id', 'z_order'])
    op.create_index('idx_board_images_group', 'board_images', ['group_id'])
    op.execute('CREATE INDEX idx_board_images_position ON board_images USING GIN (position)')
    op.execute('CREATE INDEX idx_board_images_transformations ON board_images USING GIN (transformations)')

    # Create share_links table
    op.create_table(
        'share_links',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('board_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('boards.id', ondelete='CASCADE'), nullable=False),
        sa.Column('token', sa.String(64), nullable=False, unique=True),
        sa.Column('permission_level', sa.String(20), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('expires_at', sa.DateTime(), nullable=True),
        sa.Column('last_accessed_at', sa.DateTime(), nullable=True),
        sa.Column('access_count', sa.Integer(), nullable=False, server_default=sa.text('0')),
        sa.Column('is_revoked', sa.Boolean(), nullable=False, server_default=sa.text('FALSE')),
        sa.CheckConstraint("permission_level IN ('view-only', 'view-comment')", name='check_permission_level')
    )
    op.create_unique_constraint('uq_share_links_token', 'share_links', ['token'])
    op.create_index('idx_share_links_board_revoked', 'share_links', ['board_id', 'is_revoked'])
    op.create_index('idx_share_links_expires_revoked', 'share_links', ['expires_at', 'is_revoked'])

    # Create comments table
    op.create_table(
        'comments',
        sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True, server_default=sa.text('uuid_generate_v4()')),
        sa.Column('board_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('boards.id', ondelete='CASCADE'), nullable=False),
        # SET NULL keeps the comment when its originating share link is deleted
        sa.Column('share_link_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('share_links.id', ondelete='SET NULL'), nullable=True),
        sa.Column('author_name', sa.String(100), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('position', postgresql.JSONB(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('NOW()')),
        sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default=sa.text('FALSE')),
        sa.CheckConstraint('LENGTH(content) > 0 AND LENGTH(content) <= 5000', name='check_content_length')
    )
    op.create_index('idx_comments_board_created', 'comments', ['board_id', 'created_at'])
    op.create_index('idx_comments_share_link', 'comments', ['share_link_id'])

    # Create triggers for updated_at: one shared plpgsql function, one
    # BEFORE UPDATE trigger per table carrying an updated_at column
    op.execute("""
        CREATE OR REPLACE FUNCTION update_updated_at_column()
        RETURNS TRIGGER AS $$
        BEGIN
            NEW.updated_at = NOW();
            RETURN NEW;
        END;
        $$ language 'plpgsql';
    """)

    op.execute('CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users FOR EACH ROW EXECUTE FUNCTION update_updated_at_column()')
    op.execute('CREATE TRIGGER update_boards_updated_at BEFORE UPDATE ON boards FOR EACH ROW EXECUTE FUNCTION update_updated_at_column()')
    op.execute('CREATE TRIGGER update_groups_updated_at BEFORE UPDATE ON groups FOR EACH ROW EXECUTE FUNCTION update_updated_at_column()')
    op.execute('CREATE TRIGGER update_board_images_updated_at BEFORE UPDATE ON board_images FOR EACH ROW EXECUTE FUNCTION update_updated_at_column()')
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Revert the initial schema: triggers, tables, then the UUID extension."""
    # Triggers must be removed before the function they invoke.
    for table in ("board_images", "groups", "boards", "users"):
        op.execute(f"DROP TRIGGER IF EXISTS update_{table}_updated_at ON {table}")
    op.execute('DROP FUNCTION IF EXISTS update_updated_at_column()')

    # Drop child tables first so foreign-key constraints never block.
    for table in ("comments", "share_links", "board_images", "groups", "images", "boards", "users"):
        op.drop_table(table)

    op.execute('DROP EXTENSION IF EXISTS "uuid-ossp"')
|
||||
|
||||
3
backend/app/__init__.py
Normal file
3
backend/app/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""Reference Board Viewer - Backend API."""
|
||||
|
||||
__version__ = "1.0.0"
|
||||
1
backend/app/api/__init__.py
Normal file
1
backend/app/api/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""API endpoints."""
|
||||
97
backend/app/api/auth.py
Normal file
97
backend/app/api/auth.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""Authentication endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.auth.jwt import create_access_token
|
||||
from app.auth.repository import UserRepository
|
||||
from app.auth.schemas import TokenResponse, UserCreate, UserLogin, UserResponse
|
||||
from app.auth.security import validate_password_strength, verify_password
|
||||
from app.core.deps import get_current_user, get_db
|
||||
from app.database.models.user import User
|
||||
|
||||
router = APIRouter(prefix="/auth", tags=["auth"])
|
||||
|
||||
|
||||
@router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
def register_user(user_data: UserCreate, db: Session = Depends(get_db)):
    """Register a new account.

    Raises:
        HTTPException: 409 when the email is already taken,
            400 when the password fails the strength policy.

    Returns the public representation of the persisted user.
    """
    users = UserRepository(db)

    if users.email_exists(user_data.email):
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Email already registered")

    ok, reason = validate_password_strength(user_data.password)
    if not ok:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=reason)

    created = users.create_user(email=user_data.email, password=user_data.password)
    return UserResponse.model_validate(created)
|
||||
|
||||
|
||||
@router.post("/login", response_model=TokenResponse)
def login_user(login_data: UserLogin, db: Session = Depends(get_db)):
    """Authenticate a user and issue a JWT bearer token.

    Raises:
        HTTPException: 401 on unknown email or wrong password
            (same response for both, so accounts are not enumerable),
            403 when the account has been deactivated.
    """
    users = UserRepository(db)
    account = users.get_user_by_email(login_data.email)

    if account is None or not verify_password(login_data.password, account.password_hash):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect email or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    if not account.is_active:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User account is deactivated")

    token = create_access_token(user_id=account.id, email=account.email)
    return TokenResponse(access_token=token, token_type="bearer", user=UserResponse.model_validate(account))
|
||||
|
||||
|
||||
@router.get("/me", response_model=UserResponse)
def get_current_user_info(current_user: User = Depends(get_current_user)):
    """Return the profile of the user identified by the request's JWT."""
    return UserResponse.model_validate(current_user)
|
||||
222
backend/app/api/boards.py
Normal file
222
backend/app/api/boards.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""Board management API endpoints."""
|
||||
|
||||
from typing import Annotated
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.boards.repository import BoardRepository
|
||||
from app.boards.schemas import BoardCreate, BoardDetail, BoardSummary, BoardUpdate, ViewportStateUpdate
|
||||
from app.core.deps import get_current_user, get_db
|
||||
from app.database.models.user import User
|
||||
|
||||
router = APIRouter(prefix="/boards", tags=["boards"])
|
||||
|
||||
|
||||
@router.post("", response_model=BoardDetail, status_code=status.HTTP_201_CREATED)
def create_board(
    board_data: BoardCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Create a board owned by the current user and return its details."""
    new_board = BoardRepository(db).create_board(
        user_id=current_user.id,
        title=board_data.title,
        description=board_data.description,
    )
    return BoardDetail.model_validate(new_board)
|
||||
|
||||
|
||||
@router.get("", response_model=dict)
def list_boards(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
    limit: Annotated[int, Query(ge=1, le=100)] = 50,
    offset: Annotated[int, Query(ge=0)] = 0,
):
    """Return one page of the caller's boards plus paging metadata.

    Response shape: {"boards": [...], "total": int, "limit": int, "offset": int}.
    """
    rows, total = BoardRepository(db).get_user_boards(
        user_id=current_user.id, limit=limit, offset=offset
    )
    summaries = [BoardSummary.model_validate(row) for row in rows]
    return {"boards": summaries, "total": total, "limit": limit, "offset": offset}
|
||||
|
||||
|
||||
@router.get("/{board_id}", response_model=BoardDetail)
def get_board(
    board_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Return one board by id; 404 unless it exists and belongs to the caller."""
    found = BoardRepository(db).get_board_by_id(board_id=board_id, user_id=current_user.id)
    if not found:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Board {board_id} not found",
        )
    return BoardDetail.model_validate(found)
|
||||
|
||||
|
||||
@router.patch("/{board_id}", response_model=BoardDetail)
def update_board(
    board_id: UUID,
    board_data: BoardUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Patch board metadata (title, description, viewport).

    Raises:
        HTTPException: 404 when the board does not exist or is not
            owned by the caller.
    """
    # Serialize the nested viewport model only when one was supplied.
    viewport = board_data.viewport_state.model_dump() if board_data.viewport_state else None

    updated = BoardRepository(db).update_board(
        board_id=board_id,
        user_id=current_user.id,
        title=board_data.title,
        description=board_data.description,
        viewport_state=viewport,
    )
    if not updated:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Board {board_id} not found",
        )
    return BoardDetail.model_validate(updated)
|
||||
|
||||
|
||||
@router.patch("/{board_id}/viewport", status_code=status.HTTP_204_NO_CONTENT)
def update_viewport(
    board_id: UUID,
    viewport_data: ViewportStateUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Persist only the viewport state (hot path for debounced pan/zoom).

    Returns 204 on success; 404 when the board is missing or not owned
    by the caller.
    """
    result = BoardRepository(db).update_board(
        board_id=board_id,
        user_id=current_user.id,
        title=None,  # leave title untouched
        description=None,  # leave description untouched
        viewport_state=viewport_data.model_dump(),
    )
    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Board {board_id} not found",
        )
|
||||
|
||||
|
||||
@router.delete("/{board_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_board(
    board_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Soft-delete a board the caller owns; 404 otherwise."""
    deleted = BoardRepository(db).delete_board(board_id=board_id, user_id=current_user.id)
    if not deleted:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Board {board_id} not found",
        )
|
||||
128
backend/app/api/export.py
Normal file
128
backend/app/api/export.py
Normal file
@@ -0,0 +1,128 @@
|
||||
"""Export API endpoints for downloading and exporting images."""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.core.deps import get_current_user, get_db
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.user import User
|
||||
from app.images.download import download_single_image
|
||||
from app.images.export_composite import create_composite_export
|
||||
from app.images.export_zip import create_zip_export
|
||||
|
||||
router = APIRouter(tags=["export"])
|
||||
|
||||
|
||||
@router.get("/images/{image_id}/download")
async def download_image(
    image_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> StreamingResponse:
    """Stream the original file of an image the caller owns.

    A single 404 covers both "does not exist" and "not yours", so
    existence of other users' images is not leaked.
    """
    owned = (
        db.query(Image)
        .filter(Image.id == image_id, Image.user_id == current_user.id)
        .first()
    )
    if owned is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Image not found or access denied",
        )
    return await download_single_image(owned.storage_path, owned.filename)
|
||||
|
||||
|
||||
@router.get("/boards/{board_id}/export/zip")
def export_board_zip(
    board_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> StreamingResponse:
    """Stream every image of an owned board as one ZIP archive (404 otherwise)."""
    owned = (
        db.query(Board)
        .filter(Board.id == board_id, Board.user_id == current_user.id)
        .first()
    )
    if owned is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )
    return create_zip_export(str(board_id), db)
|
||||
|
||||
|
||||
@router.get("/boards/{board_id}/export/composite")
def export_board_composite(
    board_id: UUID,
    scale: float = Query(1.0, ge=0.5, le=4.0, description="Resolution scale (0.5x to 4x)"),
    format: str = Query("PNG", regex="^(PNG|JPEG)$", description="Output format (PNG or JPEG)"),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> StreamingResponse:
    """Render an owned board's layout as one composite image.

    Args:
        scale: Resolution multiplier, validated to 0.5-4.0 by Query.
        format: "PNG" or "JPEG", validated by the Query regex.
    """
    owned = (
        db.query(Board)
        .filter(Board.id == board_id, Board.user_id == current_user.id)
        .first()
    )
    if owned is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )
    return create_composite_export(str(board_id), db, scale=scale, format=format)
|
||||
|
||||
|
||||
@router.get("/boards/{board_id}/export/info")
def get_export_info(
    board_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> dict:
    """Summarize an owned board's export: image count and size estimates.

    Useful for progress/size hints before triggering a ZIP download.
    """
    owned = (
        db.query(Board)
        .filter(Board.id == board_id, Board.user_id == current_user.id)
        .first()
    )
    if owned is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )

    board_images = (
        db.query(Image)
        .join(BoardImage, BoardImage.image_id == Image.id)
        .filter(BoardImage.board_id == board_id)
        .all()
    )
    total_size = sum(img.file_size for img in board_images)

    return {
        "board_id": str(board_id),
        "image_count": len(board_images),
        "total_size_bytes": total_size,
        # NOTE(review): 0.95 *shrinks* the estimate even though ZIP adds
        # overhead on already-compressed images — presumably assumes slight
        # compression; confirm the intended factor.
        "estimated_zip_size_bytes": int(total_size * 0.95),
    }
|
||||
216
backend/app/api/groups.py
Normal file
216
backend/app/api/groups.py
Normal file
@@ -0,0 +1,216 @@
|
||||
"""Group management API endpoints."""
|
||||
|
||||
from typing import Annotated
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.boards.repository import BoardRepository
|
||||
from app.boards.schemas import GroupCreate, GroupResponse, GroupUpdate
|
||||
from app.core.deps import get_current_user, get_db
|
||||
from app.database.models.user import User
|
||||
|
||||
router = APIRouter(prefix="/boards/{board_id}/groups", tags=["groups"])
|
||||
|
||||
|
||||
@router.post("", response_model=GroupResponse, status_code=status.HTTP_201_CREATED)
def create_group(
    board_id: UUID,
    group_data: GroupCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Create a group on an owned board and attach the given images to it."""
    repo = BoardRepository(db)

    if not repo.get_board_by_id(board_id, current_user.id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found",
        )

    created = repo.create_group(
        board_id=board_id,
        name=group_data.name,
        color=group_data.color,
        annotation=group_data.annotation,
        image_ids=group_data.image_ids,
    )

    # Member count equals the images just assigned — no extra query needed.
    payload = GroupResponse.model_validate(created)
    payload.member_count = len(group_data.image_ids)
    return payload
|
||||
|
||||
|
||||
@router.get("", response_model=list[GroupResponse])
def list_groups(
    board_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """List every group on an owned board, each annotated with its member count."""
    from sqlalchemy import func, select

    from app.database.models.board_image import BoardImage

    repo = BoardRepository(db)
    if not repo.get_board_by_id(board_id, current_user.id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found",
        )

    results = []
    for grp in repo.get_board_groups(board_id):
        # One COUNT query per group to fill the member_count field.
        count = db.execute(
            select(func.count(BoardImage.id)).where(BoardImage.group_id == grp.id)
        ).scalar_one()
        item = GroupResponse.model_validate(grp)
        item.member_count = count
        results.append(item)
    return results
|
||||
|
||||
|
||||
@router.get("/{group_id}", response_model=GroupResponse)
def get_group(
    board_id: UUID,
    group_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Fetch one group on an owned board, including its member count."""
    from sqlalchemy import func, select

    from app.database.models.board_image import BoardImage

    repo = BoardRepository(db)
    if not repo.get_board_by_id(board_id, current_user.id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found",
        )

    found = repo.get_group_by_id(group_id, board_id)
    if not found:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Group not found",
        )

    count = db.execute(
        select(func.count(BoardImage.id)).where(BoardImage.group_id == found.id)
    ).scalar_one()

    payload = GroupResponse.model_validate(found)
    payload.member_count = count
    return payload
|
||||
|
||||
|
||||
@router.patch("/{group_id}", response_model=GroupResponse)
def update_group(
    board_id: UUID,
    group_id: UUID,
    group_data: GroupUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Update a group's name, color, or annotation on an owned board."""
    from sqlalchemy import func, select

    from app.database.models.board_image import BoardImage

    repo = BoardRepository(db)
    if not repo.get_board_by_id(board_id, current_user.id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found",
        )

    changed = repo.update_group(
        group_id=group_id,
        board_id=board_id,
        name=group_data.name,
        color=group_data.color,
        annotation=group_data.annotation,
    )
    if not changed:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Group not found",
        )

    count = db.execute(
        select(func.count(BoardImage.id)).where(BoardImage.group_id == changed.id)
    ).scalar_one()

    payload = GroupResponse.model_validate(changed)
    payload.member_count = count
    return payload
|
||||
|
||||
|
||||
@router.delete("/{group_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_group(
    board_id: UUID,
    group_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)],
):
    """Delete a group on an owned board; its images simply become ungrouped."""
    repo = BoardRepository(db)
    if not repo.get_board_by_id(board_id, current_user.id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found",
        )

    if not repo.delete_group(group_id, board_id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Group not found",
        )
|
||||
470
backend/app/api/images.py
Normal file
470
backend/app/api/images.py
Normal file
@@ -0,0 +1,470 @@
|
||||
"""Image upload and management endpoints."""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, File, HTTPException, UploadFile, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.jwt import get_current_user
|
||||
from app.core.deps import get_db
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.user import User
|
||||
from app.images.processing import generate_thumbnails
|
||||
from app.images.repository import ImageRepository
|
||||
from app.images.schemas import (
|
||||
BoardImageCreate,
|
||||
BoardImageResponse,
|
||||
BoardImageUpdate,
|
||||
BulkImageUpdate,
|
||||
BulkUpdateResponse,
|
||||
ImageListResponse,
|
||||
ImageResponse,
|
||||
ImageUploadResponse,
|
||||
)
|
||||
from app.images.upload import calculate_checksum, upload_image_to_storage
|
||||
from app.images.validation import sanitize_filename, validate_image_file
|
||||
from app.images.zip_handler import extract_images_from_zip
|
||||
|
||||
router = APIRouter(prefix="/images", tags=["images"])
|
||||
|
||||
|
||||
@router.post("/upload", response_model=ImageUploadResponse, status_code=status.HTTP_201_CREATED)
async def upload_image(
    file: UploadFile = File(...),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Upload one image.

    Pipeline: validate the file, push it to storage, build thumbnails,
    checksum the bytes, record everything in the database, and return
    the stored metadata (including the id used to place it on boards).
    """
    from uuid import uuid4

    contents = await validate_image_file(file)
    safe_name = sanitize_filename(file.filename or "image.jpg")

    new_id = uuid4()
    storage_path, width, height, mime_type = await upload_image_to_storage(
        current_user.id, new_id, safe_name, contents
    )

    thumbs = generate_thumbnails(new_id, storage_path, contents)

    meta = {
        "format": mime_type.split("/")[1],
        "checksum": calculate_checksum(contents),
        "thumbnails": thumbs,
    }

    return await ImageRepository(db).create_image(
        user_id=current_user.id,
        filename=safe_name,
        storage_path=storage_path,
        file_size=len(contents),
        mime_type=mime_type,
        width=width,
        height=height,
        metadata=meta,
    )
|
||||
|
||||
|
||||
@router.post("/upload-zip", response_model=list[ImageUploadResponse])
async def upload_zip(
    file: UploadFile = File(...),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Upload multiple images from a ZIP file.

    Extracts every valid image from the archive and runs each through the
    full pipeline (storage upload, thumbnails, checksum, DB record).
    Individual failures are logged and skipped so one bad entry does not
    abort the batch.

    Maximum ZIP size: 200MB

    Raises:
        HTTPException: 400 when no entry could be processed at all.
    """
    import logging
    from uuid import uuid4

    logger = logging.getLogger(__name__)

    uploaded_images = []
    repo = ImageRepository(db)

    async for filename, contents in extract_images_from_zip(file):
        try:
            clean_filename = sanitize_filename(filename)

            image_id = uuid4()
            storage_path, width, height, mime_type = await upload_image_to_storage(
                current_user.id, image_id, clean_filename, contents
            )

            thumbnail_paths = generate_thumbnails(image_id, storage_path, contents)
            checksum = calculate_checksum(contents)

            metadata = {
                "format": mime_type.split("/")[1],
                "checksum": checksum,
                "thumbnails": thumbnail_paths,
            }

            image = await repo.create_image(
                user_id=current_user.id,
                filename=clean_filename,
                storage_path=storage_path,
                file_size=len(contents),
                mime_type=mime_type,
                width=width,
                height=height,
                metadata=metadata,
            )

            uploaded_images.append(image)

        except Exception:
            # BUG FIX: the old handler printed "(unknown)" to stdout instead
            # of identifying the failing entry. Log the filename with a
            # traceback and keep processing the rest of the archive.
            logger.exception("Error processing ZIP entry %r; skipping", filename)
            continue

    if not uploaded_images:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No images could be processed from ZIP")

    return uploaded_images
|
||||
|
||||
|
||||
@router.get("/library", response_model=ImageListResponse)
async def get_image_library(
    page: int = 1,
    page_size: int = 50,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Get the current user's image library with pagination.

    Args:
        page: 1-based page number; values below 1 are clamped to 1.
        page_size: Items per page; values below 1 are clamped to 1.

    Returns:
        Paginated list of all images uploaded by the current user.
    """
    # BUG FIX: page=0 or a negative page produced a negative OFFSET,
    # which fails at the database layer. Clamp to valid minimums
    # instead of surfacing a 500.
    page = max(page, 1)
    page_size = max(page_size, 1)

    repo = ImageRepository(db)
    offset = (page - 1) * page_size
    images, total = await repo.get_user_images(current_user.id, limit=page_size, offset=offset)

    return ImageListResponse(images=list(images), total=total, page=page, page_size=page_size)
|
||||
|
||||
|
||||
@router.get("/{image_id}", response_model=ImageResponse)
async def get_image(
    image_id: UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Fetch one image by id; 404 when absent, 403 when not the owner."""
    found = await ImageRepository(db).get_image_by_id(image_id)

    if not found:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Image not found")
    if found.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")

    return found
|
||||
|
||||
|
||||
@router.delete("/{image_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_image(
    image_id: UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Permanently delete an image the caller owns.

    Refused with 400 while the image is still placed on any board
    (reference_count > 0); 404 when missing, 403 for someone else's image.
    """
    repo = ImageRepository(db)
    target = await repo.get_image_by_id(image_id)

    if not target:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Image not found")
    if target.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")
    if target.reference_count > 0:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Image is still used on {target.reference_count} board(s). Remove from boards first.",
        )

    # Remove binary artifacts first, then the database row.
    from app.images.processing import delete_thumbnails
    from app.images.upload import delete_image_from_storage

    await delete_image_from_storage(target.storage_path)
    # NOTE(review): `metadata` is read as the model's JSON column here, not
    # SQLAlchemy's Base.metadata — confirm the attribute mapping on the model.
    if "thumbnails" in target.metadata:
        await delete_thumbnails(target.metadata["thumbnails"])

    await repo.delete_image(image_id)
|
||||
|
||||
|
||||
@router.post("/boards/{board_id}/images", response_model=BoardImageResponse, status_code=status.HTTP_201_CREATED)
async def add_image_to_board(
    board_id: UUID,
    data: BoardImageCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Place an already-uploaded image onto a board.

    Both the board and the image must exist and belong to the caller
    (404 when missing, 403 when owned by someone else).
    """
    from sqlalchemy import select

    board_row = await db.execute(select(Board).where(Board.id == board_id))
    target_board = board_row.scalar_one_or_none()

    if not target_board:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Board not found")
    if target_board.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")

    repo = ImageRepository(db)
    target_image = await repo.get_image_by_id(data.image_id)

    if not target_image:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Image not found")
    if target_image.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Image access denied")

    placement = await repo.add_image_to_board(
        board_id=board_id,
        image_id=data.image_id,
        position=data.position,
        transformations=data.transformations,
        z_order=data.z_order,
    )

    # Eagerly load the relationship so the response schema can serialize it.
    await db.refresh(placement, ["image"])
    return placement
|
||||
|
||||
|
||||
@router.patch("/boards/{board_id}/images/{image_id}", response_model=BoardImageResponse)
async def update_board_image(
    board_id: UUID,
    image_id: UUID,
    data: BoardImageUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Update board image position, transformations, z-order, or group.

    This endpoint is optimized for frequent position updates (debounced from frontend).
    Only provided fields are updated.
    """
    from sqlalchemy import select

    # Guard: the board must exist and belong to the caller.
    board = (await db.execute(select(Board).where(Board.id == board_id))).scalar_one_or_none()
    if board is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Board not found")
    if board.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")

    # Apply the partial update through the repository.
    placement = await ImageRepository(db).update_board_image(
        board_id=board_id,
        image_id=image_id,
        position=data.position,
        transformations=data.transformations,
        z_order=data.z_order,
        group_id=data.group_id,
    )
    if placement is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Image not on this board")

    # Eagerly load the related image so the response model can serialize it.
    await db.refresh(placement, ["image"])
    return placement
|
||||
|
||||
|
||||
@router.delete("/boards/{board_id}/images/{image_id}", status_code=status.HTTP_204_NO_CONTENT)
async def remove_image_from_board(
    board_id: UUID,
    image_id: UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Remove image from board.

    This doesn't delete the image, just removes it from this board.
    The image remains in the user's library.
    """
    from sqlalchemy import select

    # Guard: the board must exist and belong to the caller.
    board = (await db.execute(select(Board).where(Board.id == board_id))).scalar_one_or_none()
    if board is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Board not found")
    if board.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")

    # Drop only the board/image association; the Image row itself is untouched.
    if not await ImageRepository(db).remove_image_from_board(board_id, image_id):
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Image not on this board")
|
||||
|
||||
|
||||
@router.patch("/boards/{board_id}/images/bulk", response_model=BulkUpdateResponse)
async def bulk_update_board_images(
    board_id: UUID,
    data: BulkImageUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Bulk update multiple images on a board.

    Applies the same changes to all specified images. Useful for multi-selection
    operations. Failures on individual images are counted rather than aborting
    the whole batch.

    NOTE(review): this route is registered after the parameterized
    ``PATCH /boards/{board_id}/images/{image_id}`` route in this module, so the
    literal path segment "bulk" is first matched against that route's
    ``image_id: UUID`` parameter and rejected with a 422 before this handler is
    ever reached. Declaring this route before the parameterized one should fix
    the shadowing — confirm against the router registration order.
    """
    import logging

    logger = logging.getLogger(__name__)

    # Verify board ownership
    from sqlalchemy import select

    board_result = await db.execute(select(Board).where(Board.id == board_id))
    board = board_result.scalar_one_or_none()

    if not board:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Board not found")

    if board.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")

    repo = ImageRepository(db)
    updated_ids = []
    failed_count = 0

    for image_id in data.image_ids:
        try:
            # Fetch the current row once; both deltas below derive from it.
            # (Previously the row was queried twice — once per delta.)
            board_image = None
            if data.position_delta or data.z_order_delta is not None:
                board_image = await repo.get_board_image(board_id, image_id)

            # Translate the image by the requested offset, if any.
            position = None
            if data.position_delta and board_image and board_image.position:
                current_pos = board_image.position
                position = {
                    "x": current_pos.get("x", 0) + data.position_delta["dx"],
                    "y": current_pos.get("y", 0) + data.position_delta["dy"],
                }

            # Shift the stacking order by the requested delta, if any.
            z_order = None
            if data.z_order_delta is not None and board_image:
                z_order = board_image.z_order + data.z_order_delta

            updated = await repo.update_board_image(
                board_id=board_id,
                image_id=image_id,
                position=position,
                transformations=data.transformations,
                z_order=z_order,
                group_id=None,  # Bulk operations don't change groups
            )

            if updated:
                updated_ids.append(image_id)
            else:
                failed_count += 1

        except Exception:
            # A single bad row must not fail the whole batch; record and move on.
            logger.exception("Error updating image %s", image_id)
            failed_count += 1
            continue

    return BulkUpdateResponse(
        updated_count=len(updated_ids),
        failed_count=failed_count,
        image_ids=updated_ids,
    )
|
||||
|
||||
|
||||
@router.get("/boards/{board_id}/images", response_model=list[BoardImageResponse])
async def get_board_images(
    board_id: UUID,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Get all images on a board, ordered by z-order.

    Used for loading board contents in the canvas.
    """
    from sqlalchemy import select

    # Access guard (owner only for now; shared-link access is handled elsewhere).
    board = (await db.execute(select(Board).where(Board.id == board_id))).scalar_one_or_none()
    if board is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Board not found")
    if board.user_id != current_user.id:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access denied")

    placements = await ImageRepository(db).get_board_images(board_id)

    # Populate each placement's image relationship for serialization.
    for placement in placements:
        await db.refresh(placement, ["image"])

    return list(placements)
|
||||
235
backend/app/api/library.py
Normal file
235
backend/app/api/library.py
Normal file
@@ -0,0 +1,235 @@
|
||||
"""Image library API endpoints."""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.core.deps import get_current_user, get_db
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.user import User
|
||||
from app.images.search import count_images, search_images
|
||||
|
||||
router = APIRouter(tags=["library"])
|
||||
|
||||
|
||||
class ImageLibraryResponse(BaseModel):
    """Response schema for a single image in the user's library."""

    id: str
    filename: str
    file_size: int  # size in bytes
    mime_type: str
    width: int  # pixel dimensions — presumably; confirm against upload pipeline
    height: int
    reference_count: int  # number of board placements referencing this image
    created_at: str  # ISO-8601 string (populated via datetime.isoformat())
    thumbnail_url: str | None = None  # "medium" thumbnail path when one exists
|
||||
|
||||
|
||||
class ImageLibraryListResponse(BaseModel):
    """Paginated response schema for the library listing endpoint."""

    images: list[ImageLibraryResponse]
    total: int  # total matches for the query, not just this page
    limit: int  # page size echoed back from the request
    offset: int  # pagination offset echoed back from the request
|
||||
|
||||
|
||||
class AddToBoardRequest(BaseModel):
    """Request schema for adding library image to board."""

    # UUID of the target board as a string (parsed to UUID by the endpoint).
    board_id: str
    # Canvas drop coordinates. Pydantic deep-copies field defaults, so this
    # mutable default is not shared between requests.
    position: dict = {"x": 0, "y": 0}
|
||||
|
||||
|
||||
@router.get("/library/images", response_model=ImageLibraryListResponse)
def list_library_images(
    query: str | None = Query(None, description="Search query"),
    limit: int = Query(50, ge=1, le=100, description="Results per page"),
    offset: int = Query(0, ge=0, description="Pagination offset"),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> ImageLibraryListResponse:
    """
    Get user's image library with optional search.

    Returns all images owned by the user, regardless of board usage.
    """
    owner_id = str(current_user.id)

    # One query for the current page, one for the overall match count.
    matches = search_images(owner_id, db, query=query, limit=limit, offset=offset)
    total = count_images(owner_id, db, query=query)

    def _to_response(img) -> ImageLibraryResponse:
        # Map an ORM row onto the API schema, picking the medium thumbnail.
        thumbnails = img.image_metadata.get("thumbnails", {})
        return ImageLibraryResponse(
            id=str(img.id),
            filename=img.filename,
            file_size=img.file_size,
            mime_type=img.mime_type,
            width=img.width,
            height=img.height,
            reference_count=img.reference_count,
            created_at=img.created_at.isoformat(),
            thumbnail_url=thumbnails.get("medium"),
        )

    return ImageLibraryListResponse(
        images=[_to_response(img) for img in matches],
        total=total,
        limit=limit,
        offset=offset,
    )
|
||||
|
||||
|
||||
@router.post("/library/images/{image_id}/add-to-board", status_code=status.HTTP_201_CREATED)
def add_library_image_to_board(
    image_id: UUID,
    request: AddToBoardRequest,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> dict:
    """
    Add an existing library image to a board.

    Creates a new BoardImage reference without duplicating the file.
    Increments reference count on the image.

    Raises:
        HTTPException: 404 if the image or board is missing or not owned by the
            caller (a malformed board id is treated the same way); 409 if the
            image is already placed on the board.
    """
    # Parse the board id up front. Previously UUID() was only called when
    # constructing the BoardImage, so a malformed string surfaced as an
    # unhandled ValueError (HTTP 500) instead of a clean client error.
    try:
        board_uuid = UUID(request.board_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        ) from None

    # Verify image exists and user owns it
    image = db.query(Image).filter(Image.id == image_id, Image.user_id == current_user.id).first()

    if image is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Image not found in library",
        )

    # Verify board exists and user owns it
    from app.database.models.board import Board

    board = db.query(Board).filter(Board.id == board_uuid, Board.user_id == current_user.id).first()

    if board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )

    # Check if image already on this board
    existing = (
        db.query(BoardImage).filter(BoardImage.board_id == board_uuid, BoardImage.image_id == image_id).first()
    )

    if existing:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Image already exists on this board",
        )

    # Place the new image on top: one past the board's current max z_order.
    max_z = (
        db.query(BoardImage.z_order)
        .filter(BoardImage.board_id == board_uuid)
        .order_by(BoardImage.z_order.desc())
        .first()
    )

    next_z = (max_z[0] + 1) if max_z else 0

    # Create BoardImage reference with identity transformations.
    board_image = BoardImage(
        board_id=board_uuid,
        image_id=image_id,
        position=request.position,
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=next_z,
    )
    db.add(board_image)

    # Increment reference count.
    # NOTE(review): this is a read-modify-write on the ORM object; concurrent
    # adds could lose an increment — consider a SQL-side increment. Confirm
    # whether concurrency matters here.
    image.reference_count += 1

    db.commit()
    db.refresh(board_image)

    return {"id": str(board_image.id), "message": "Image added to board successfully"}
|
||||
|
||||
|
||||
@router.delete("/library/images/{image_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_library_image(
    image_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> None:
    """
    Permanently delete an image from library.

    Removes image from all boards and deletes from storage.
    Only allowed if user owns the image.

    Raises:
        HTTPException: 404 if the image does not exist or is not owned by the
            caller.
    """
    import logging

    from app.core.storage import storage_client

    logger = logging.getLogger(__name__)

    # Get image (ownership enforced by the user_id filter)
    image = db.query(Image).filter(Image.id == image_id, Image.user_id == current_user.id).first()

    if image is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Image not found in library",
        )

    # Delete all BoardImage references
    db.query(BoardImage).filter(BoardImage.image_id == image_id).delete()

    # Delete from storage. Storage failures are logged but do not block the
    # database deletion — the record is removed either way (best-effort).
    import contextlib

    try:
        storage_client.delete_file(image.storage_path)
        # Also delete thumbnails if they exist
        thumbnails = image.image_metadata.get("thumbnails", {})
        for thumb_path in thumbnails.values():
            if thumb_path:
                with contextlib.suppress(Exception):
                    storage_client.delete_file(thumb_path)
    except Exception:
        # Proper logging instead of print(): keeps the warning and traceback
        # in the application log stream.
        logger.warning("Failed to delete image from storage", exc_info=True)

    # Delete database record
    db.delete(image)
    db.commit()
|
||||
|
||||
|
||||
@router.get("/library/stats")
def get_library_stats(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> dict:
    """
    Get statistics about user's image library.

    Returns total images, total size, and usage across boards.
    """
    # Load the user's images and aggregate in Python.
    owned = db.query(Image).filter(Image.user_id == current_user.id).all()

    count = len(owned)
    size_bytes = sum(img.file_size for img in owned)
    references = sum(img.reference_count for img in owned)

    return {
        "total_images": count,
        "total_size_bytes": size_bytes,
        "total_board_references": references,
        "average_references_per_image": references / count if count > 0 else 0,
    }
|
||||
79
backend/app/api/quality.py
Normal file
79
backend/app/api/quality.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""Connection quality detection and testing endpoints."""
|
||||
|
||||
import time
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
|
||||
router = APIRouter(tags=["quality"])
|
||||
|
||||
|
||||
class ConnectionTestRequest(BaseModel):
    """Request schema for connection test."""

    # Size of the payload the client will download for the measurement.
    test_size_bytes: int = 100000  # 100KB default test size
|
||||
|
||||
|
||||
class ConnectionTestResponse(BaseModel):
    """Response schema for connection test results."""

    speed_mbps: float  # measured by the client; the server returns 0.0
    latency_ms: float
    quality_tier: str  # 'low', 'medium', 'high'
    recommended_thumbnail: str  # 'low', 'medium', 'high'
|
||||
|
||||
|
||||
@router.post("/connection/test", response_model=ConnectionTestResponse)
async def test_connection_speed(request: ConnectionTestRequest) -> ConnectionTestResponse:
    """
    Test connection speed and return quality recommendation.

    This endpoint helps determine appropriate thumbnail quality.
    The client measures download time of test data to calculate speed.

    Args:
        request: Test configuration

    Returns:
        Connection quality information and recommendations
    """
    # The server-side "latency" below is the elapsed time between two adjacent
    # statements (effectively ~0 ms) — the client performs the real round-trip
    # measurement.
    started = time.time()
    measured_latency_ms = (time.time() - started) * 1000

    # speed_mbps is a placeholder: the client computes
    # (test_size_bytes * 8) / (download_time_seconds * 1_000_000)
    # after downloading the test payload.
    return ConnectionTestResponse(
        speed_mbps=0.0,  # Client calculates this
        latency_ms=measured_latency_ms,
        quality_tier="medium",
        recommended_thumbnail="medium",
    )
|
||||
|
||||
|
||||
@router.get("/connection/test-data")
async def get_test_data(size: int = 100000):
    """
    Serve test data for connection speed measurement.

    Client downloads this and measures time to calculate speed.

    Args:
        size: Size of test data in bytes (clamped to 0..500KB)

    Returns:
        Random bytes for speed testing, served as application/octet-stream
    """
    import secrets

    from fastapi import Response

    # Clamp to [0, 500KB]: a negative value would make token_bytes raise
    # (HTTP 500), and oversized requests could be abused for bandwidth waste.
    size = max(0, min(size, 500000))

    # Return an explicit binary Response: letting FastAPI JSON-encode raw
    # random bytes fails on non-UTF-8 sequences.
    return Response(content=secrets.token_bytes(size), media_type="application/octet-stream")
|
||||
277
backend/app/api/sharing.py
Normal file
277
backend/app/api/sharing.py
Normal file
@@ -0,0 +1,277 @@
|
||||
"""Board sharing API endpoints."""
|
||||
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.boards.schemas import (
|
||||
BoardDetail,
|
||||
CommentCreate,
|
||||
CommentResponse,
|
||||
ShareLinkCreate,
|
||||
ShareLinkResponse,
|
||||
)
|
||||
from app.boards.sharing import generate_secure_token
|
||||
from app.core.deps import get_current_user, get_db
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.comment import Comment
|
||||
from app.database.models.share_link import ShareLink
|
||||
from app.database.models.user import User
|
||||
|
||||
router = APIRouter(tags=["sharing"])
|
||||
|
||||
|
||||
def validate_share_link(token: str, db: Session, required_permission: str = "view-only") -> ShareLink:
    """
    Validate share link token and check permissions.

    Looks up a non-revoked link by token, rejects expired links, enforces the
    permission level, and records the access (count + timestamp) as a side
    effect before returning.

    Args:
        token: Share link token
        db: Database session
        required_permission: Required permission level ("view-only" or
            "view-comment"; only "view-comment" adds an extra check)

    Returns:
        ShareLink if valid

    Raises:
        HTTPException: 403 if invalid, revoked, expired, or insufficient
            permissions
    """
    share_link = (
        db.query(ShareLink)
        .filter(
            ShareLink.token == token,
            ShareLink.is_revoked == False,  # noqa: E712
        )
        .first()
    )

    if share_link is None:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Invalid or revoked share link",
        )

    # Check expiration. NOTE(review): this compares against a naive UTC
    # datetime — assumes expires_at is stored naive in UTC; a timezone-aware
    # column value would raise TypeError here. Confirm the column definition.
    if share_link.expires_at and share_link.expires_at < datetime.utcnow():
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Share link has expired",
        )

    # Check permission level: only the "view-comment" requirement is enforced;
    # any non-revoked link satisfies "view-only".
    if required_permission == "view-comment" and share_link.permission_level != "view-comment":
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Insufficient permissions - commenting not allowed",
        )

    # Update access tracking. NOTE(review): read-modify-write + commit here
    # means concurrent requests can lose increments, and the caller inherits
    # a committed session mid-request.
    share_link.access_count += 1
    share_link.last_accessed_at = datetime.utcnow()
    db.commit()

    return share_link
|
||||
|
||||
|
||||
@router.post("/boards/{board_id}/share-links", response_model=ShareLinkResponse, status_code=status.HTTP_201_CREATED)
def create_share_link(
    board_id: UUID,
    share_link_data: ShareLinkCreate,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> ShareLinkResponse:
    """
    Create a new share link for a board.

    Only the board owner can create share links.
    """
    # Ownership guard: a missing board and a foreign board both yield 404.
    owned_board = db.query(Board).filter(Board.id == board_id, Board.user_id == current_user.id).first()
    if owned_board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )

    # Persist a link carrying a fresh unguessable token.
    link = ShareLink(
        board_id=board_id,
        token=generate_secure_token(),
        permission_level=share_link_data.permission_level,
        expires_at=share_link_data.expires_at,
    )
    db.add(link)
    db.commit()
    db.refresh(link)

    return ShareLinkResponse.model_validate(link)
|
||||
|
||||
|
||||
@router.get("/boards/{board_id}/share-links", response_model=list[ShareLinkResponse])
def list_share_links(
    board_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> list[ShareLinkResponse]:
    """
    List all share links for a board.

    Only the board owner can list share links.
    """
    # Ownership guard: a missing board and a foreign board both yield 404.
    owned_board = db.query(Board).filter(Board.id == board_id, Board.user_id == current_user.id).first()
    if owned_board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )

    # Newest first.
    links = db.query(ShareLink).filter(ShareLink.board_id == board_id).order_by(ShareLink.created_at.desc()).all()

    return [ShareLinkResponse.model_validate(link) for link in links]
|
||||
|
||||
|
||||
@router.delete("/boards/{board_id}/share-links/{link_id}", status_code=status.HTTP_204_NO_CONTENT)
def revoke_share_link(
    board_id: UUID,
    link_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> None:
    """
    Revoke (soft delete) a share link.

    Only the board owner can revoke share links.
    """
    # Ownership guard: a missing board and a foreign board both yield 404.
    owned_board = db.query(Board).filter(Board.id == board_id, Board.user_id == current_user.id).first()
    if owned_board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )

    # The link must belong to this board.
    link = db.query(ShareLink).filter(ShareLink.id == link_id, ShareLink.board_id == board_id).first()
    if link is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Share link not found",
        )

    # Soft delete: the row is kept but the token stops validating.
    link.is_revoked = True
    db.commit()
|
||||
|
||||
|
||||
@router.get("/shared/{token}", response_model=BoardDetail)
def get_shared_board(
    token: str,
    db: Session = Depends(get_db),
) -> BoardDetail:
    """
    Access a shared board via token.

    No authentication required - access controlled by share link token.
    """
    # Token validation raises 403 on invalid/revoked/expired links.
    share_link = validate_share_link(token, db, required_permission="view-only")

    # Resolve the board the link points at.
    board = db.query(Board).filter(Board.id == share_link.board_id).first()
    if board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found",
        )

    return BoardDetail.model_validate(board)
|
||||
|
||||
|
||||
@router.post("/shared/{token}/comments", response_model=CommentResponse, status_code=status.HTTP_201_CREATED)
def create_comment(
    token: str,
    comment_data: CommentCreate,
    db: Session = Depends(get_db),
) -> CommentResponse:
    """
    Create a comment on a shared board.

    Requires view-comment permission level.
    """
    # Token validation enforces the view-comment permission (403 otherwise).
    share_link = validate_share_link(token, db, required_permission="view-comment")

    # Persist the comment, attributed to the share link it came through.
    new_comment = Comment(
        board_id=share_link.board_id,
        share_link_id=share_link.id,
        author_name=comment_data.author_name,
        content=comment_data.content,
        position=comment_data.position,
    )
    db.add(new_comment)
    db.commit()
    db.refresh(new_comment)

    return CommentResponse.model_validate(new_comment)
|
||||
|
||||
|
||||
@router.get("/shared/{token}/comments", response_model=list[CommentResponse])
def list_comments(
    token: str,
    db: Session = Depends(get_db),
) -> list[CommentResponse]:
    """
    List all comments on a shared board.

    Requires view-only or view-comment permission.
    """
    # Any valid (non-revoked, non-expired) link may read comments.
    share_link = validate_share_link(token, db, required_permission="view-only")

    # Only non-deleted comments are visible through a share link, newest first.
    visible_comments = (
        db.query(Comment)
        .filter(Comment.board_id == share_link.board_id, Comment.is_deleted == False)  # noqa: E712
        .order_by(Comment.created_at.desc())
        .all()
    )

    return [CommentResponse.model_validate(comment) for comment in visible_comments]
|
||||
|
||||
|
||||
@router.get("/boards/{board_id}/comments", response_model=list[CommentResponse])
def list_board_comments(
    board_id: UUID,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
) -> list[CommentResponse]:
    """
    List all comments on a board (owner view).

    Only the board owner can access this endpoint.
    """
    # Ownership guard: a missing board and a foreign board both yield 404.
    owned_board = db.query(Board).filter(Board.id == board_id, Board.user_id == current_user.id).first()
    if owned_board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Board not found or access denied",
        )

    # Owner view includes deleted comments as well, newest first.
    all_comments = db.query(Comment).filter(Comment.board_id == board_id).order_by(Comment.created_at.desc()).all()

    return [CommentResponse.model_validate(comment) for comment in all_comments]
|
||||
1
backend/app/auth/__init__.py
Normal file
1
backend/app/auth/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Authentication module."""
|
||||
48
backend/app/auth/jwt.py
Normal file
48
backend/app/auth/jwt.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""JWT token generation and validation."""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from uuid import UUID
|
||||
|
||||
from jose import JWTError, jwt
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def create_access_token(user_id: UUID, email: str, expires_delta: timedelta | None = None) -> str:
    """
    Create a new JWT access token.

    Args:
        user_id: User's UUID
        email: User's email address
        expires_delta: Optional custom expiration time (falls back to
            settings.ACCESS_TOKEN_EXPIRE_MINUTES when falsy)

    Returns:
        Encoded JWT token string
    """
    # Use timezone-aware UTC: datetime.utcnow() is deprecated (Python 3.12)
    # and returns naive datetimes; jose encodes both to the same numeric-date
    # claim, so token contents are unchanged.
    now = datetime.now(timezone.utc)
    if expires_delta:
        expire = now + expires_delta
    else:
        expire = now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)

    # "sub" must be a string per RFC 7519; "type" distinguishes access tokens.
    to_encode = {"sub": str(user_id), "email": email, "exp": expire, "iat": now, "type": "access"}

    encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
    return encoded_jwt
|
||||
|
||||
|
||||
def decode_access_token(token: str) -> dict | None:
    """
    Decode and validate a JWT access token.

    Args:
        token: JWT token string to decode

    Returns:
        Decoded token payload if valid, None otherwise
    """
    # Signature/expiry failures surface as JWTError; map them to None so
    # callers can treat an invalid token exactly like a missing one.
    try:
        return jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
    except JWTError:
        return None
|
||||
81
backend/app/auth/repository.py
Normal file
81
backend/app/auth/repository.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""User repository for database operations."""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.auth.security import hash_password
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
class UserRepository:
    """Repository for user database operations."""

    def __init__(self, db: Session):
        """
        Initialize repository.

        Args:
            db: Database session
        """
        self.db = db

    def create_user(self, email: str, password: str) -> User:
        """
        Create a new user.

        Args:
            email: User email (will be lowercased)
            password: Plain text password (will be hashed)

        Returns:
            Created user instance
        """
        # Normalize the address and never persist the raw password.
        new_user = User(email=email.lower(), password_hash=hash_password(password))

        self.db.add(new_user)
        self.db.commit()
        self.db.refresh(new_user)

        return new_user

    def get_user_by_email(self, email: str) -> User | None:
        """
        Get user by email address.

        Args:
            email: User email to search for

        Returns:
            User if found, None otherwise
        """
        # Lookups are case-insensitive because addresses are stored lowercased.
        return self.db.query(User).filter(User.email == email.lower()).first()

    def get_user_by_id(self, user_id: UUID) -> User | None:
        """
        Get user by ID.

        Args:
            user_id: User UUID

        Returns:
            User if found, None otherwise
        """
        return self.db.query(User).filter(User.id == user_id).first()

    def email_exists(self, email: str) -> bool:
        """
        Check if email already exists.

        Args:
            email: Email to check

        Returns:
            True if email exists, False otherwise
        """
        # Reuse the email lookup instead of duplicating the query.
        return self.get_user_by_email(email) is not None
|
||||
44
backend/app/auth/schemas.py
Normal file
44
backend/app/auth/schemas.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Authentication schemas for request/response validation."""
|
||||
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, EmailStr, Field
|
||||
|
||||
|
||||
class UserBase(BaseModel):
    """Base user schema shared by request and response models."""

    # Validated and normalized by pydantic's EmailStr type.
    email: EmailStr
|
||||
|
||||
|
||||
class UserCreate(UserBase):
    """Schema for user registration."""

    # Length bounds only; complexity rules are enforced separately
    # (see validate_password_strength in app.auth.security).
    password: str = Field(..., min_length=8, max_length=100)
|
||||
|
||||
|
||||
class UserLogin(BaseModel):
    """Schema for user login."""

    email: EmailStr
    # No length constraints on login: any submitted secret is simply checked
    # against the stored hash.
    password: str
|
||||
|
||||
|
||||
class UserResponse(UserBase):
    """Schema for user response."""

    id: UUID
    created_at: datetime
    is_active: bool

    class Config:
        # Allow constructing the model from ORM objects via attribute access.
        from_attributes = True
|
||||
|
||||
|
||||
class TokenResponse(BaseModel):
    """Schema for JWT token response."""

    access_token: str
    token_type: str = "bearer"  # OAuth2 bearer token scheme
    # Embedded profile so clients don't need a second request after login.
    user: UserResponse
|
||||
66
backend/app/auth/security.py
Normal file
66
backend/app/auth/security.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""Password hashing utilities using passlib."""
|
||||
|
||||
import re
|
||||
|
||||
from passlib.context import CryptContext
|
||||
|
||||
# Create password context for hashing and verification.
# deprecated="auto": passlib flags hashes made with any scheme other than the
# configured ones (here bcrypt) as needing a re-hash.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
    """
    Hash a password using bcrypt.

    Args:
        password: Plain text password

    Returns:
        Hashed password string
    """
    # Salting and cost-factor management are delegated to the shared
    # passlib context.
    return pwd_context.hash(password)
|
||||
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a candidate password against a stored bcrypt hash.

    Args:
        plain_password: Candidate password supplied by the user.
        hashed_password: Hash previously produced by hash_password().

    Returns:
        True when the password matches the hash, False otherwise.
    """
    matches = pwd_context.verify(plain_password, hashed_password)
    return matches
|
||||
|
||||
|
||||
def validate_password_strength(password: str, min_length: int = 8) -> tuple[bool, str]:
    """
    Validate password meets complexity requirements.

    Requirements:
    - At least *min_length* characters (default 8)
    - At least 1 uppercase letter
    - At least 1 lowercase letter
    - At least 1 number

    Args:
        password: Plain text password to validate
        min_length: Minimum acceptable length. Defaults to 8, matching
            the historical behaviour and PASSWORD_MIN_LENGTH used
            elsewhere in the project.

    Returns:
        Tuple of (is_valid, error_message); error_message is "" when valid.
    """
    # Checks are ordered so the first failure reported is the cheapest
    # for the user to fix (length before character classes).
    if len(password) < min_length:
        return False, f"Password must be at least {min_length} characters long"

    if not re.search(r"[A-Z]", password):
        return False, "Password must contain at least one uppercase letter"

    if not re.search(r"[a-z]", password):
        return False, "Password must contain at least one lowercase letter"

    if not re.search(r"\d", password):
        return False, "Password must contain at least one number"

    return True, ""
|
||||
1
backend/app/boards/__init__.py
Normal file
1
backend/app/boards/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Boards module for board management."""
|
||||
29
backend/app/boards/permissions.py
Normal file
29
backend/app/boards/permissions.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Permission validation middleware for boards."""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.boards.repository import BoardRepository
|
||||
|
||||
|
||||
def validate_board_ownership(board_id: UUID, user_id: UUID, db: Session) -> None:
    """
    Validate that the user owns the board.

    Args:
        board_id: Board UUID
        user_id: User UUID
        db: Database session

    Raises:
        HTTPException: 404 if board not found or not owned by user.
            404 (not 403) is used deliberately so callers cannot probe
            for the existence of other users' boards.
    """
    if BoardRepository(db).board_exists(board_id, user_id):
        return

    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"Board {board_id} not found or access denied",
    )
|
||||
408
backend/app/boards/repository.py
Normal file
408
backend/app/boards/repository.py
Normal file
@@ -0,0 +1,408 @@
|
||||
"""Board repository for database operations."""
|
||||
|
||||
from collections.abc import Sequence
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.group import Group
|
||||
|
||||
|
||||
class BoardRepository:
    """Repository for Board database operations.

    Encapsulates persistence for boards, their image groups, and group
    membership. Board accessors take the owning user's ID so ownership
    is enforced inside the query itself, and soft-deleted boards are
    always filtered out.
    """

    def __init__(self, db: Session):
        """
        Initialize repository with database session.

        Args:
            db: SQLAlchemy database session
        """
        self.db = db

    def create_board(
        self,
        user_id: UUID,
        title: str,
        description: str | None = None,
        viewport_state: dict | None = None,
    ) -> Board:
        """
        Create a new board.

        Args:
            user_id: Owner's user ID
            title: Board title
            description: Optional board description
            viewport_state: Optional custom viewport state; defaults to
                origin pan, 1.0x zoom, no rotation.

        Returns:
            Created Board instance (committed and refreshed)
        """
        if viewport_state is None:
            viewport_state = {"x": 0, "y": 0, "zoom": 1.0, "rotation": 0}

        board = Board(
            user_id=user_id,
            title=title,
            description=description,
            viewport_state=viewport_state,
        )

        self.db.add(board)
        self.db.commit()
        self.db.refresh(board)

        return board

    def get_board_by_id(self, board_id: UUID, user_id: UUID) -> Board | None:
        """
        Get board by ID for a specific user.

        Args:
            board_id: Board UUID
            user_id: User UUID (for ownership check)

        Returns:
            Board if found, owned by user, and not soft-deleted;
            None otherwise
        """
        stmt = select(Board).where(
            Board.id == board_id,
            Board.user_id == user_id,
            Board.is_deleted == False,  # noqa: E712
        )

        return self.db.execute(stmt).scalar_one_or_none()

    def get_user_boards(
        self,
        user_id: UUID,
        limit: int = 50,
        offset: int = 0,
    ) -> tuple[Sequence[Board], int]:
        """
        Get all boards for a user with pagination.

        Args:
            user_id: User UUID
            limit: Maximum number of boards to return
            offset: Number of boards to skip

        Returns:
            Tuple of (list of boards, total count ignoring pagination).
            Each returned Board carries an ``image_count`` attribute.
        """
        # Boards joined with their image count, newest-updated first.
        stmt = (
            select(Board, func.count(BoardImage.id).label("image_count"))
            .outerjoin(BoardImage, Board.id == BoardImage.board_id)
            .where(Board.user_id == user_id, Board.is_deleted == False)  # noqa: E712
            .group_by(Board.id)
            .order_by(Board.updated_at.desc())
            .limit(limit)
            .offset(offset)
        )

        results = self.db.execute(stmt).all()

        # Attach the computed count so response schemas
        # (e.g. BoardSummary.image_count) serialize the real value
        # instead of their default of 0.
        boards = []
        for board, image_count in results:
            board.image_count = image_count
            boards.append(board)

        # Total count for pagination metadata (unaffected by limit/offset).
        count_stmt = select(func.count(Board.id)).where(Board.user_id == user_id, Board.is_deleted == False)  # noqa: E712

        total = self.db.execute(count_stmt).scalar_one()

        return boards, total

    def update_board(
        self,
        board_id: UUID,
        user_id: UUID,
        title: str | None = None,
        description: str | None = None,
        viewport_state: dict | None = None,
    ) -> Board | None:
        """
        Update board metadata. Only the fields passed as non-None are
        changed, so None cannot be used to clear a field.

        Args:
            board_id: Board UUID
            user_id: User UUID (for ownership check)
            title: New title (if provided)
            description: New description (if provided)
            viewport_state: New viewport state (if provided)

        Returns:
            Updated Board if found and owned by user, None otherwise
        """
        board = self.get_board_by_id(board_id, user_id)

        if not board:
            return None

        if title is not None:
            board.title = title

        if description is not None:
            board.description = description

        if viewport_state is not None:
            board.viewport_state = viewport_state

        self.db.commit()
        self.db.refresh(board)

        return board

    def delete_board(self, board_id: UUID, user_id: UUID) -> bool:
        """
        Soft delete a board (sets is_deleted; the row is retained).

        Args:
            board_id: Board UUID
            user_id: User UUID (for ownership check)

        Returns:
            True if deleted, False if not found or not owned
        """
        board = self.get_board_by_id(board_id, user_id)

        if not board:
            return False

        board.is_deleted = True
        self.db.commit()

        return True

    def board_exists(self, board_id: UUID, user_id: UUID) -> bool:
        """
        Check if board exists and is owned by user.

        Args:
            board_id: Board UUID
            user_id: User UUID

        Returns:
            True if board exists, is owned by user, and is not soft-deleted
        """
        stmt = select(func.count(Board.id)).where(
            Board.id == board_id,
            Board.user_id == user_id,
            Board.is_deleted == False,  # noqa: E712
        )

        count = self.db.execute(stmt).scalar_one()

        return count > 0

    # Group operations

    def create_group(
        self,
        board_id: UUID,
        name: str,
        color: str,
        annotation: str | None,
        image_ids: list[UUID],
    ) -> Group:
        """
        Create a new group and assign images to it.

        IDs in *image_ids* that do not belong to this board are silently
        skipped.

        Args:
            board_id: Board UUID
            name: Group name
            color: Hex color code
            annotation: Optional annotation text
            image_ids: List of image IDs to include

        Returns:
            Created Group instance
        """
        group = Group(
            board_id=board_id,
            name=name,
            color=color,
            annotation=annotation,
        )

        self.db.add(group)
        self.db.flush()  # Populate group.id before assigning members

        # Single IN query instead of one SELECT per image (avoids N+1).
        stmt = select(BoardImage).where(
            BoardImage.board_id == board_id,
            BoardImage.image_id.in_(image_ids),
        )
        for board_image in self.db.execute(stmt).scalars():
            board_image.group_id = group.id

        self.db.commit()
        self.db.refresh(group)

        return group

    def get_board_groups(self, board_id: UUID) -> Sequence[Group]:
        """
        Get all groups for a board with member counts.

        Args:
            board_id: Board UUID

        Returns:
            List of groups, newest first. Each Group carries a
            ``member_count`` attribute with its current image count.
        """
        stmt = (
            select(Group, func.count(BoardImage.id).label("member_count"))
            .outerjoin(BoardImage, Group.id == BoardImage.group_id)
            .where(Group.board_id == board_id)
            .group_by(Group.id)
            .order_by(Group.created_at.desc())
        )

        results = self.db.execute(stmt).all()

        # Attach the computed count so response schemas
        # (e.g. GroupResponse.member_count) serialize the real value
        # instead of their default of 0.
        groups = []
        for group, member_count in results:
            group.member_count = member_count
            groups.append(group)

        return groups

    def get_group_by_id(self, group_id: UUID, board_id: UUID) -> Group | None:
        """
        Get group by ID.

        Args:
            group_id: Group UUID
            board_id: Board UUID (for verification)

        Returns:
            Group if found on this board, None otherwise
        """
        stmt = select(Group).where(Group.id == group_id, Group.board_id == board_id)

        return self.db.execute(stmt).scalar_one_or_none()

    def update_group(
        self,
        group_id: UUID,
        board_id: UUID,
        name: str | None = None,
        color: str | None = None,
        annotation: str | None = None,
    ) -> Group | None:
        """
        Update group metadata. Only non-None fields are changed, so
        None cannot be used to clear the annotation.

        Args:
            group_id: Group UUID
            board_id: Board UUID
            name: New name (if provided)
            color: New color (if provided)
            annotation: New annotation (if provided)

        Returns:
            Updated Group if found, None otherwise
        """
        group = self.get_group_by_id(group_id, board_id)

        if not group:
            return None

        if name is not None:
            group.name = name

        if color is not None:
            group.color = color

        if annotation is not None:
            group.annotation = annotation

        self.db.commit()
        self.db.refresh(group)

        return group

    def delete_group(self, group_id: UUID, board_id: UUID) -> bool:
        """
        Delete a group and ungroup its members (members are kept,
        only their group_id is cleared).

        Args:
            group_id: Group UUID
            board_id: Board UUID

        Returns:
            True if deleted, False if not found
        """
        group = self.get_group_by_id(group_id, board_id)

        if not group:
            return False

        # Ungroup all members (set group_id to None)
        stmt = select(BoardImage).where(BoardImage.group_id == group_id)
        members = self.db.execute(stmt).scalars().all()

        for member in members:
            member.group_id = None

        # Delete the group
        self.db.delete(group)
        self.db.commit()

        return True

    def add_images_to_group(self, group_id: UUID, board_id: UUID, image_ids: list[UUID]) -> int:
        """
        Add images to a group.

        IDs not present on this board are ignored; duplicate IDs in
        *image_ids* are counted once.

        Args:
            group_id: Group UUID
            board_id: Board UUID
            image_ids: List of image IDs to add

        Returns:
            Number of images added
        """
        # Single IN query instead of one SELECT per image (avoids N+1).
        stmt = select(BoardImage).where(
            BoardImage.board_id == board_id,
            BoardImage.image_id.in_(image_ids),
        )
        matched = self.db.execute(stmt).scalars().all()

        for board_image in matched:
            board_image.group_id = group_id

        self.db.commit()

        return len(matched)

    def remove_images_from_group(self, group_id: UUID, image_ids: list[UUID]) -> int:
        """
        Remove images from a group.

        IDs not currently in the group are ignored; duplicate IDs in
        *image_ids* are counted once.

        Args:
            group_id: Group UUID
            image_ids: List of image IDs to remove

        Returns:
            Number of images removed
        """
        # Single IN query instead of one SELECT per image (avoids N+1).
        stmt = select(BoardImage).where(
            BoardImage.group_id == group_id,
            BoardImage.image_id.in_(image_ids),
        )
        matched = self.db.execute(stmt).scalars().all()

        for board_image in matched:
            board_image.group_id = None

        self.db.commit()

        return len(matched)
|
||||
154
backend/app/boards/schemas.py
Normal file
154
backend/app/boards/schemas.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Board Pydantic schemas for request/response validation."""
|
||||
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||
|
||||
|
||||
class ViewportState(BaseModel):
    """Viewport state for canvas position and zoom.

    Mirrors the dict stored in Board.viewport_state; defaults describe
    an untouched canvas (origin, 1.0x zoom, no rotation).
    """

    x: float = Field(default=0, description="Horizontal pan position")
    y: float = Field(default=0, description="Vertical pan position")
    zoom: float = Field(default=1.0, ge=0.1, le=5.0, description="Zoom level (0.1 to 5.0)")
    rotation: float = Field(default=0, ge=0, le=360, description="Canvas rotation in degrees (0 to 360)")


class BoardCreate(BaseModel):
    """Schema for creating a new board.

    Viewport state is not accepted at creation time; the repository
    supplies the default viewport.
    """

    title: str = Field(..., min_length=1, max_length=255, description="Board title")
    description: str | None = Field(default=None, description="Optional board description")


class ViewportStateUpdate(BaseModel):
    """Schema for updating viewport state only.

    All fields are required (unlike ViewportState's defaults) so a
    partial viewport cannot silently reset the missing axes.
    """

    x: float = Field(..., description="Horizontal pan position")
    y: float = Field(..., description="Vertical pan position")
    zoom: float = Field(..., ge=0.1, le=5.0, description="Zoom level (0.1 to 5.0)")
    rotation: float = Field(..., ge=0, le=360, description="Canvas rotation in degrees (0 to 360)")


class BoardUpdate(BaseModel):
    """Schema for updating board metadata.

    All fields optional; fields left as None are not changed by the
    repository's update_board.
    """

    title: str | None = Field(None, min_length=1, max_length=255, description="Board title")
    description: str | None = Field(None, description="Board description")
    viewport_state: ViewportState | None = Field(None, description="Viewport state")
|
||||
|
||||
|
||||
class BoardSummary(BaseModel):
    """Summary schema for board list view."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    title: str
    description: str | None = None
    # NOTE(review): image_count and thumbnail_url default to 0/None —
    # they only carry real values if the caller attaches them to the
    # ORM object (or passes them explicitly); confirm the service layer
    # populates them.
    image_count: int = Field(default=0, description="Number of images on board")
    thumbnail_url: str | None = Field(default=None, description="URL to board thumbnail")
    created_at: datetime
    updated_at: datetime


class BoardDetail(BaseModel):
    """Detailed schema for single board view with all data."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    user_id: UUID
    title: str
    description: str | None = None
    viewport_state: ViewportState
    created_at: datetime
    updated_at: datetime
    is_deleted: bool = False

    @field_validator("viewport_state", mode="before")
    @classmethod
    def convert_viewport_state(cls, v):
        """Convert dict to ViewportState if needed.

        The ORM stores viewport_state as a plain JSON dict; this
        pre-validator upgrades it so the field always validates.
        """
        if isinstance(v, dict):
            return ViewportState(**v)
        return v
|
||||
|
||||
|
||||
class GroupCreate(BaseModel):
    """Schema for creating a new group."""

    name: str = Field(..., min_length=1, max_length=255, description="Group name")
    color: str = Field(..., pattern=r"^#[0-9A-Fa-f]{6}$", description="Hex color code (#RRGGBB)")
    annotation: str | None = Field(None, max_length=10000, description="Optional text annotation")
    # Pydantic v2 uses min_length for collection size constraints;
    # min_items is the removed v1 spelling.
    image_ids: list[UUID] = Field(..., min_length=1, description="List of image IDs to include in group")


class GroupUpdate(BaseModel):
    """Schema for updating group metadata.

    All fields optional; fields left as None are not changed.
    """

    name: str | None = Field(None, min_length=1, max_length=255, description="Group name")
    color: str | None = Field(None, pattern=r"^#[0-9A-Fa-f]{6}$", description="Hex color code")
    annotation: str | None = Field(None, max_length=10000, description="Text annotation")


class GroupResponse(BaseModel):
    """Response schema for group with member count."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    board_id: UUID
    name: str
    color: str
    annotation: str | None = None
    # Populated from the member_count attribute the repository attaches;
    # defaults to 0 if the source object lacks it.
    member_count: int = Field(default=0, description="Number of images in group")
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
class ShareLinkCreate(BaseModel):
    """Schema for creating a new share link."""

    # Only the two known levels are accepted; see
    # app.boards.sharing.check_permission for their semantics.
    permission_level: str = Field(..., pattern=r"^(view-only|view-comment)$", description="Permission level")
    # None means the link never expires.
    expires_at: datetime | None = Field(None, description="Optional expiration datetime")


class ShareLinkResponse(BaseModel):
    """Response schema for share link."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    board_id: UUID
    # Opaque URL-safe token generated by generate_secure_token.
    token: str
    permission_level: str
    created_at: datetime
    expires_at: datetime | None = None
    # Access tracking fields updated on each successful validation.
    last_accessed_at: datetime | None = None
    access_count: int = 0
    is_revoked: bool = False


class CommentCreate(BaseModel):
    """Schema for creating a new comment."""

    author_name: str = Field(..., min_length=1, max_length=100, description="Commenter name")
    content: str = Field(..., min_length=1, max_length=5000, description="Comment text")
    # NOTE(review): position is an unvalidated dict — presumably
    # {"x": float, "y": float} canvas coordinates; confirm and consider
    # a typed model.
    position: dict | None = Field(None, description="Optional canvas position {x, y}")


class CommentResponse(BaseModel):
    """Response schema for comment."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    board_id: UUID
    # Set when the comment was made through a share link; None for
    # comments by the board owner.
    share_link_id: UUID | None = None
    author_name: str
    content: str
    position: dict | None = None
    created_at: datetime
    is_deleted: bool = False
|
||||
84
backend/app/boards/sharing.py
Normal file
84
backend/app/boards/sharing.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Board sharing functionality."""
|
||||
|
||||
import secrets
|
||||
import string
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database.models.share_link import ShareLink
|
||||
|
||||
|
||||
def generate_secure_token(length: int = 64) -> str:
    """Return a cryptographically secure, URL-safe share-link token.

    Args:
        length: Number of characters in the token (default 64).

    Returns:
        Random string drawn from [A-Za-z0-9_-], safe to embed in URLs
        without escaping.
    """
    # secrets (not random) so tokens are unpredictable.
    url_safe_alphabet = string.ascii_letters + string.digits + "-_"
    chars = [secrets.choice(url_safe_alphabet) for _ in range(length)]
    return "".join(chars)
|
||||
|
||||
|
||||
def validate_share_link_token(token: str, db: Session) -> ShareLink | None:
    """
    Validate a share link token and return the share link if valid.

    A share link is valid if:
    - Token exists
    - Not revoked
    - Not expired (if expires_at is set)

    Side effect: on success, increments access_count, stamps
    last_accessed_at, and commits — so validation also records the access.

    Args:
        token: The share link token
        db: Database session

    Returns:
        ShareLink if valid, None otherwise
    """
    share_link = (
        db.query(ShareLink)
        .filter(
            ShareLink.token == token,
            ShareLink.is_revoked == False,  # noqa: E712
        )
        .first()
    )

    if share_link is None:
        return None

    # Check expiration.
    # NOTE(review): datetime.utcnow() is naive (and deprecated in
    # Python 3.12); this comparison assumes expires_at is stored as a
    # naive UTC datetime — confirm the column is not timezone-aware,
    # otherwise this raises TypeError.
    if share_link.expires_at and share_link.expires_at < datetime.utcnow():
        return None

    # Update access tracking (read-modify-write; not atomic under
    # concurrent validations — counts may undercount slightly).
    share_link.access_count += 1
    share_link.last_accessed_at = datetime.utcnow()
    db.commit()

    return share_link
|
||||
|
||||
|
||||
def check_permission(share_link: ShareLink, required_permission: str) -> bool:
    """
    Check if a share link has the required permission level.

    Permission levels form a small hierarchy: 'view-comment' implies
    'view-only', but not the reverse.

    Args:
        share_link: The share link to check
        required_permission: Required permission level ('view-only' or 'view-comment')

    Returns:
        True if permission granted, False otherwise (including for
        unknown permission names).
    """
    level = share_link.permission_level

    if required_permission == "view-comment":
        # Commenting requires an explicit view-comment grant.
        return level == "view-comment"

    if required_permission == "view-only":
        # Any valid link grants read access.
        return level in ("view-only", "view-comment")

    # Unknown requirement names are denied outright.
    return False
|
||||
1
backend/app/core/__init__.py
Normal file
1
backend/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Core application modules."""
|
||||
92
backend/app/core/config.py
Normal file
92
backend/app/core/config.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Application configuration."""
|
||||
|
||||
from functools import lru_cache
|
||||
from typing import Any
|
||||
|
||||
from pydantic import PostgresDsn, field_validator
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application settings.

    Values are loaded (case-insensitively) from the environment and an
    optional .env file; unknown environment variables are ignored.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )

    # Application
    APP_NAME: str = "Reference Board Viewer"
    APP_VERSION: str = "1.0.0"
    DEBUG: bool = False
    API_V1_PREFIX: str = "/api/v1"

    # Database (required: no default for DATABASE_URL)
    DATABASE_URL: PostgresDsn
    DATABASE_POOL_SIZE: int = 20
    DATABASE_MAX_OVERFLOW: int = 0

    # JWT Authentication (SECRET_KEY is required and must come from the
    # environment — never hard-code it)
    SECRET_KEY: str
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30

    # MinIO Storage (endpoint and credentials are required)
    MINIO_ENDPOINT: str
    MINIO_ACCESS_KEY: str
    MINIO_SECRET_KEY: str
    MINIO_BUCKET: str = "webref"
    MINIO_SECURE: bool = False

    # CORS
    CORS_ORIGINS: list[str] = ["http://localhost:5173", "http://localhost:3000"]

    @field_validator("CORS_ORIGINS", mode="before")
    @classmethod
    def parse_cors_origins(cls, v: Any) -> list[str]:
        """Parse CORS origins from string or list.

        Allows CORS_ORIGINS to be given as a comma-separated env var.
        """
        if isinstance(v, str):
            return [origin.strip() for origin in v.split(",")]
        return v

    # File Upload
    # NOTE(review): these overlap with app/core/constants.py
    # (MAX_IMAGE_SIZE is also 50MB there, and it defines its own
    # ALLOWED_MIME_TYPES) — consider a single source of truth.
    MAX_FILE_SIZE: int = 52428800  # 50MB
    MAX_BATCH_SIZE: int = 524288000  # 500MB
    ALLOWED_MIME_TYPES: list[str] = [
        "image/jpeg",
        "image/png",
        "image/gif",
        "image/webp",
        "image/svg+xml",
    ]

    @field_validator("ALLOWED_MIME_TYPES", mode="before")
    @classmethod
    def parse_mime_types(cls, v: Any) -> list[str]:
        """Parse MIME types from string or list.

        Allows ALLOWED_MIME_TYPES to be given as a comma-separated env var.
        """
        if isinstance(v, str):
            return [mime.strip() for mime in v.split(",")]
        return v

    # Performance
    REQUEST_TIMEOUT: int = 30
    MAX_CONCURRENT_UPLOADS: int = 10

    # Security
    BCRYPT_ROUNDS: int = 12
    PASSWORD_MIN_LENGTH: int = 8

    # Logging
    LOG_LEVEL: str = "INFO"
|
||||
|
||||
|
||||
@lru_cache
def get_settings() -> Settings:
    """Get cached application settings.

    lru_cache makes this a process-wide singleton: the environment and
    .env file are read only once per process.
    """
    return Settings()


# Export settings instance — module-level convenience handle so callers
# can `from app.core.config import settings`.
settings = get_settings()
|
||||
38
backend/app/core/constants.py
Normal file
38
backend/app/core/constants.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Application-wide constants."""
|
||||
|
||||
# File upload limits
# NOTE(review): these overlap with app/core/config.py Settings
# (MAX_FILE_SIZE is also 50MB there, but MAX_BATCH_SIZE there is 500MB
# vs MAX_ZIP_SIZE 200MB here) — confirm which is authoritative.
MAX_IMAGE_SIZE = 52_428_800  # 50MB in bytes
MAX_ZIP_SIZE = 209_715_200  # 200MB in bytes

# Image processing
MAX_IMAGE_DIMENSION = 10_000  # Max width or height in pixels
# Thumbnail long-edge sizes chosen per estimated client bandwidth.
THUMBNAIL_SIZES = {
    "low": 800,  # For slow connections (<1 Mbps)
    "medium": 1600,  # For medium connections (1-5 Mbps)
    "high": 3200,  # For fast connections (>5 Mbps)
}

# Pagination defaults
DEFAULT_PAGE_SIZE = 50
MAX_PAGE_SIZE = 100

# Board limits
MAX_BOARD_TITLE_LENGTH = 255
MAX_BOARD_DESCRIPTION_LENGTH = 1000
MAX_IMAGES_PER_BOARD = 1000

# Authentication
TOKEN_EXPIRE_HOURS = 168  # 7 days
# Duplicated in config.Settings.PASSWORD_MIN_LENGTH — keep in sync.
PASSWORD_MIN_LENGTH = 8

# Supported image formats
# NOTE(review): "image/jpg" is not a registered MIME type (browsers send
# "image/jpeg"); it is kept here presumably for lenient matching —
# config.Settings.ALLOWED_MIME_TYPES omits it. Confirm intent.
ALLOWED_MIME_TYPES = {
    "image/jpeg",
    "image/jpg",
    "image/png",
    "image/gif",
    "image/webp",
    "image/svg+xml",
}

ALLOWED_EXTENSIONS = {".jpg", ".jpeg", ".png", ".gif", ".webp", ".svg"}
|
||||
79
backend/app/core/deps.py
Normal file
79
backend/app/core/deps.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""Dependency injection utilities."""
|
||||
|
||||
from typing import Annotated
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.auth.jwt import decode_access_token
|
||||
from app.database.models.user import User
|
||||
from app.database.session import get_db
|
||||
|
||||
# Database session dependency — annotated alias so route signatures can
# declare `db: DatabaseSession` instead of repeating Depends(get_db).
DatabaseSession = Annotated[Session, Depends(get_db)]

# Security scheme for JWT Bearer token; makes FastAPI require and parse
# an `Authorization: Bearer <token>` header.
security = HTTPBearer()
|
||||
|
||||
|
||||
def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security), db: Session = Depends(get_db)
) -> User:
    """
    Get current authenticated user from JWT token.

    Intended as a FastAPI dependency: routes declare
    `user: User = Depends(get_current_user)`.

    Args:
        credentials: HTTP Authorization Bearer token
        db: Database session

    Returns:
        Current authenticated, active user

    Raises:
        HTTPException: 401 if the token is invalid/malformed or the user
            does not exist; 403 if the account is deactivated.
    """
    # Decode token (decode_access_token returns None on any failure).
    token = credentials.credentials
    payload = decode_access_token(token)

    if payload is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Extract user ID from the standard JWT subject claim.
    user_id_str: str | None = payload.get("sub")
    if user_id_str is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token payload",
            headers={"WWW-Authenticate": "Bearer"},
        )

    try:
        user_id = UUID(user_id_str)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid user ID in token",
            headers={"WWW-Authenticate": "Bearer"},
        ) from None

    # Get user from database (token may outlive a deleted account).
    user = db.query(User).filter(User.id == user_id).first()

    if user is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
            headers={"WWW-Authenticate": "Bearer"},
        )

    if not user.is_active:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User account is deactivated")

    return user
|
||||
67
backend/app/core/errors.py
Normal file
67
backend/app/core/errors.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Custom exception classes."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
class WebRefException(Exception):
|
||||
"""Base exception for all custom exceptions."""
|
||||
|
||||
def __init__(self, message: str, status_code: int = 500, details: dict[str, Any] | None = None):
|
||||
self.message = message
|
||||
self.status_code = status_code
|
||||
self.details = details or {}
|
||||
super().__init__(self.message)
|
||||
|
||||
|
||||
class ValidationError(WebRefException):
|
||||
"""Validation error."""
|
||||
|
||||
def __init__(self, message: str, details: dict[str, Any] | None = None):
|
||||
super().__init__(message, status_code=422, details=details)
|
||||
|
||||
|
||||
class AuthenticationError(WebRefException):
|
||||
"""Authentication error."""
|
||||
|
||||
def __init__(self, message: str = "Authentication failed"):
|
||||
super().__init__(message, status_code=401)
|
||||
|
||||
|
||||
class AuthorizationError(WebRefException):
|
||||
"""Authorization error."""
|
||||
|
||||
def __init__(self, message: str = "Insufficient permissions"):
|
||||
super().__init__(message, status_code=403)
|
||||
|
||||
|
||||
class NotFoundError(WebRefException):
|
||||
"""Resource not found error."""
|
||||
|
||||
def __init__(self, resource: str, resource_id: str | None = None):
|
||||
message = f"{resource} not found"
|
||||
if resource_id:
|
||||
message = f"{resource} with id {resource_id} not found"
|
||||
super().__init__(message, status_code=404)
|
||||
|
||||
|
||||
class ConflictError(WebRefException):
|
||||
"""Resource conflict error."""
|
||||
|
||||
def __init__(self, message: str):
|
||||
super().__init__(message, status_code=409)
|
||||
|
||||
|
||||
class FileTooLargeError(WebRefException):
|
||||
"""File size exceeds limit."""
|
||||
|
||||
def __init__(self, max_size: int):
|
||||
message = f"File size exceeds maximum allowed size of {max_size} bytes"
|
||||
super().__init__(message, status_code=413)
|
||||
|
||||
|
||||
class UnsupportedFileTypeError(WebRefException):
|
||||
"""Unsupported file type."""
|
||||
|
||||
def __init__(self, file_type: str, allowed_types: list[str]):
|
||||
message = f"File type '{file_type}' not supported. Allowed types: {', '.join(allowed_types)}"
|
||||
super().__init__(message, status_code=415)
|
||||
31
backend/app/core/logging.py
Normal file
31
backend/app/core/logging.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Logging configuration."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def setup_logging() -> None:
    """Configure application logging.

    Sets the root logger level from settings.LOG_LEVEL, installs a
    stdout stream handler, and quiets chatty third-party loggers.
    Call once at application startup before any logging happens.
    """
    # Fall back to INFO if LOG_LEVEL names an unknown level.
    log_level = getattr(logging, settings.LOG_LEVEL.upper(), logging.INFO)

    # Configure root logger
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        handlers=[logging.StreamHandler(sys.stdout)],
    )

    # Keep noisy libraries at sane levels regardless of the app level.
    logging.getLogger("uvicorn").setLevel(logging.INFO)
    logging.getLogger("uvicorn.access").setLevel(logging.INFO)
    logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
    logging.getLogger("boto3").setLevel(logging.WARNING)
    logging.getLogger("botocore").setLevel(logging.WARNING)

    logger = logging.getLogger(__name__)
    # Lazy %-style args instead of an f-string: the message is only
    # formatted if the INFO level is actually enabled.
    logger.info("Logging configured with level: %s", settings.LOG_LEVEL)
|
||||
27
backend/app/core/middleware.py
Normal file
27
backend/app/core/middleware.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""CORS and other middleware configuration."""
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def setup_middleware(app: FastAPI) -> None:
    """Attach application-wide middleware to *app*.

    Currently installs CORS with origins from ``settings.CORS_ORIGINS``;
    credentials are allowed and all methods/headers are permitted.
    """
    cors_options = {
        "allow_origins": settings.CORS_ORIGINS,
        "allow_credentials": True,
        "allow_methods": ["*"],
        "allow_headers": ["*"],
    }
    app.add_middleware(CORSMiddleware, **cors_options)

    # Security headers (optional, add more as needed)
    # Note: TrustedHostMiddleware not added by default in dev
    # Uncomment for production:
    # app.add_middleware(
    #     TrustedHostMiddleware,
    #     allowed_hosts=["yourdomain.com", "*.yourdomain.com"]
    # )
|
||||
69
backend/app/core/ownership.py
Normal file
69
backend/app/core/ownership.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""Ownership verification utilities."""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database.models.board import Board
|
||||
|
||||
|
||||
def verify_board_ownership_sync(db: Session, board_id: UUID, user_id: UUID) -> Board:
    """
    Verify board ownership (synchronous).

    Args:
        db: Database session
        board_id: Board UUID
        user_id: User UUID

    Returns:
        Board instance if owned by user

    Raises:
        HTTPException: 404 if board not found or not owned by user
        (a single 404 for both cases avoids leaking board existence)
    """
    ownership_query = select(Board).where(
        Board.id == board_id,
        Board.user_id == user_id,
        Board.is_deleted == False,  # noqa: E712
    )
    board = db.execute(ownership_query).scalar_one_or_none()

    if board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Board {board_id} not found",
        )

    return board
|
||||
|
||||
|
||||
async def verify_board_ownership_async(db: AsyncSession, board_id: UUID, user_id: UUID) -> Board:
    """
    Verify board ownership (asynchronous).

    Args:
        db: Async database session
        board_id: Board UUID
        user_id: User UUID

    Returns:
        Board instance if owned by user

    Raises:
        HTTPException: 404 if board not found or not owned by user
        (a single 404 for both cases avoids leaking board existence)
    """
    ownership_query = select(Board).where(
        Board.id == board_id,
        Board.user_id == user_id,
        Board.is_deleted == False,  # noqa: E712
    )
    result = await db.execute(ownership_query)
    board = result.scalar_one_or_none()

    if board is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Board {board_id} not found",
        )

    return board
|
||||
119
backend/app/core/repository.py
Normal file
119
backend/app/core/repository.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Base repository with common database operations."""
|
||||
|
||||
from typing import TypeVar
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
# Type variable for model classes
# NOTE(review): the class below uses PEP 695 syntax
# (``class BaseRepository[ModelType]``), whose own type parameter shadows
# this module-level TypeVar inside the class body. This declaration is kept
# for any external annotations importing it — confirm before removing.
ModelType = TypeVar("ModelType")
|
||||
|
||||
|
||||
class BaseRepository[ModelType]:
|
||||
"""Base repository with common CRUD operations."""
|
||||
|
||||
def __init__(self, model: type[ModelType], db: Session | AsyncSession):
|
||||
"""
|
||||
Initialize repository.
|
||||
|
||||
Args:
|
||||
model: SQLAlchemy model class
|
||||
db: Database session (sync or async)
|
||||
"""
|
||||
self.model = model
|
||||
self.db = db
|
||||
|
||||
def get_by_id_sync(self, id: UUID) -> ModelType | None:
|
||||
"""
|
||||
Get entity by ID (synchronous).
|
||||
|
||||
Args:
|
||||
id: Entity UUID
|
||||
|
||||
Returns:
|
||||
Entity if found, None otherwise
|
||||
"""
|
||||
return self.db.query(self.model).filter(self.model.id == id).first()
|
||||
|
||||
async def get_by_id_async(self, id: UUID) -> ModelType | None:
|
||||
"""
|
||||
Get entity by ID (asynchronous).
|
||||
|
||||
Args:
|
||||
id: Entity UUID
|
||||
|
||||
Returns:
|
||||
Entity if found, None otherwise
|
||||
"""
|
||||
stmt = select(self.model).where(self.model.id == id)
|
||||
result = await self.db.execute(stmt)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
def count_sync(self, **filters) -> int:
|
||||
"""
|
||||
Count entities with optional filters (synchronous).
|
||||
|
||||
Args:
|
||||
**filters: Column filters (column_name=value)
|
||||
|
||||
Returns:
|
||||
Count of matching entities
|
||||
"""
|
||||
query = self.db.query(func.count(self.model.id))
|
||||
for key, value in filters.items():
|
||||
query = query.filter(getattr(self.model, key) == value)
|
||||
return query.scalar()
|
||||
|
||||
async def count_async(self, **filters) -> int:
|
||||
"""
|
||||
Count entities with optional filters (asynchronous).
|
||||
|
||||
Args:
|
||||
**filters: Column filters (column_name=value)
|
||||
|
||||
Returns:
|
||||
Count of matching entities
|
||||
"""
|
||||
stmt = select(func.count(self.model.id))
|
||||
for key, value in filters.items():
|
||||
stmt = stmt.where(getattr(self.model, key) == value)
|
||||
result = await self.db.execute(stmt)
|
||||
return result.scalar_one()
|
||||
|
||||
def delete_sync(self, id: UUID) -> bool:
|
||||
"""
|
||||
Delete entity by ID (synchronous).
|
||||
|
||||
Args:
|
||||
id: Entity UUID
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found
|
||||
"""
|
||||
entity = self.get_by_id_sync(id)
|
||||
if not entity:
|
||||
return False
|
||||
|
||||
self.db.delete(entity)
|
||||
self.db.commit()
|
||||
return True
|
||||
|
||||
async def delete_async(self, id: UUID) -> bool:
|
||||
"""
|
||||
Delete entity by ID (asynchronous).
|
||||
|
||||
Args:
|
||||
id: Entity UUID
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found
|
||||
"""
|
||||
entity = await self.get_by_id_async(id)
|
||||
if not entity:
|
||||
return False
|
||||
|
||||
await self.db.delete(entity)
|
||||
await self.db.commit()
|
||||
return True
|
||||
75
backend/app/core/responses.py
Normal file
75
backend/app/core/responses.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Standard response utilities."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class ErrorResponse:
    """Standard error response formats.

    Each helper returns a plain dict with ``status_code`` and ``detail``
    keys suitable for building HTTP error responses.
    """

    @staticmethod
    def _payload(code: int, detail: str) -> dict[str, Any]:
        # Single source of truth for the shared error-payload shape.
        return {"status_code": code, "detail": detail}

    @staticmethod
    def not_found(resource: str = "Resource") -> dict[str, Any]:
        """404 Not Found response."""
        return ErrorResponse._payload(status.HTTP_404_NOT_FOUND, f"{resource} not found")

    @staticmethod
    def forbidden(message: str = "Access denied") -> dict[str, Any]:
        """403 Forbidden response."""
        return ErrorResponse._payload(status.HTTP_403_FORBIDDEN, message)

    @staticmethod
    def unauthorized(message: str = "Authentication required") -> dict[str, Any]:
        """401 Unauthorized response (includes WWW-Authenticate header)."""
        payload = ErrorResponse._payload(status.HTTP_401_UNAUTHORIZED, message)
        payload["headers"] = {"WWW-Authenticate": "Bearer"}
        return payload

    @staticmethod
    def bad_request(message: str) -> dict[str, Any]:
        """400 Bad Request response."""
        return ErrorResponse._payload(status.HTTP_400_BAD_REQUEST, message)

    @staticmethod
    def conflict(message: str) -> dict[str, Any]:
        """409 Conflict response."""
        return ErrorResponse._payload(status.HTTP_409_CONFLICT, message)
|
||||
|
||||
|
||||
class SuccessResponse:
|
||||
"""Standard success response formats."""
|
||||
|
||||
@staticmethod
|
||||
def created(data: dict[str, Any], message: str = "Created successfully") -> dict[str, Any]:
|
||||
"""201 Created response."""
|
||||
return {
|
||||
"message": message,
|
||||
"data": data,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def ok(data: dict[str, Any] | None = None, message: str = "Success") -> dict[str, Any]:
|
||||
"""200 OK response."""
|
||||
response = {"message": message}
|
||||
if data:
|
||||
response["data"] = data
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def no_content() -> None:
|
||||
"""204 No Content response."""
|
||||
return None
|
||||
57
backend/app/core/schemas.py
Normal file
57
backend/app/core/schemas.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""Base Pydantic schemas."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
class BaseSchema(BaseModel):
|
||||
"""Base schema with common configuration."""
|
||||
|
||||
model_config = ConfigDict(from_attributes=True, populate_by_name=True, json_schema_extra={"example": {}})
|
||||
|
||||
|
||||
class TimestampSchema(BaseSchema):
|
||||
"""Schema with timestamp fields."""
|
||||
|
||||
created_at: datetime = Field(..., description="Creation timestamp")
|
||||
updated_at: datetime | None = Field(None, description="Last update timestamp")
|
||||
|
||||
|
||||
class IDSchema(BaseSchema):
|
||||
"""Schema with ID field."""
|
||||
|
||||
id: UUID = Field(..., description="Unique identifier")
|
||||
|
||||
|
||||
class ResponseSchema(BaseSchema):
|
||||
"""Generic response schema."""
|
||||
|
||||
message: str = Field(..., description="Response message")
|
||||
data: dict[str, Any] | None = Field(None, description="Response data")
|
||||
|
||||
|
||||
class ErrorSchema(BaseSchema):
|
||||
"""Error response schema."""
|
||||
|
||||
error: str = Field(..., description="Error message")
|
||||
details: dict[str, Any] | None = Field(None, description="Error details")
|
||||
status_code: int = Field(..., description="HTTP status code")
|
||||
|
||||
|
||||
class PaginationSchema(BaseSchema):
|
||||
"""Pagination metadata schema."""
|
||||
|
||||
total: int = Field(..., description="Total number of items")
|
||||
page: int = Field(..., description="Current page number")
|
||||
page_size: int = Field(..., description="Items per page")
|
||||
total_pages: int = Field(..., description="Total number of pages")
|
||||
|
||||
|
||||
class PaginatedResponse(BaseSchema):
|
||||
"""Paginated response schema."""
|
||||
|
||||
items: list[Any] = Field(..., description="List of items")
|
||||
pagination: PaginationSchema = Field(..., description="Pagination metadata")
|
||||
163
backend/app/core/storage.py
Normal file
163
backend/app/core/storage.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""MinIO storage client utilities."""
|
||||
|
||||
import logging
|
||||
from io import BytesIO
|
||||
from typing import BinaryIO
|
||||
|
||||
import boto3
|
||||
from botocore.client import Config
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StorageClient:
    """MinIO storage client wrapper (S3-compatible, via boto3).

    All operations target the single bucket from ``settings.MINIO_BUCKET``;
    the ``bucket_name`` parameters on the MinIO-compatible shims are
    accepted for signature compatibility only.
    """

    def __init__(self):
        """Initialize the S3 client and ensure the configured bucket exists."""
        self.client = boto3.client(
            "s3",
            endpoint_url=f"{'https' if settings.MINIO_SECURE else 'http'}://{settings.MINIO_ENDPOINT}",
            aws_access_key_id=settings.MINIO_ACCESS_KEY,
            aws_secret_access_key=settings.MINIO_SECRET_KEY,
            config=Config(signature_version="s3v4"),
        )
        self.bucket = settings.MINIO_BUCKET
        self._ensure_bucket_exists()

    def put_object(self, bucket_name: str, object_name: str, data: BinaryIO, length: int, content_type: str):
        """MinIO-compatible put_object method.

        ``bucket_name`` and ``length`` are ignored; the configured bucket
        is always used and boto3 streams the file object itself.
        """
        return self.upload_file(data, object_name, content_type)

    def remove_object(self, bucket_name: str, object_name: str):
        """MinIO-compatible remove_object method (``bucket_name`` is ignored)."""
        return self.delete_file(object_name)

    def _ensure_bucket_exists(self) -> None:
        """Create the bucket if it doesn't exist.

        Robustness fix: previously ANY ``ClientError`` from ``head_bucket``
        (including access-denied) triggered a doomed ``create_bucket``
        attempt. Now the bucket is created only when head_bucket reports it
        missing (404 / NoSuchBucket); other errors are re-raised.
        """
        try:
            self.client.head_bucket(Bucket=self.bucket)
        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "")
            if error_code in ("404", "NoSuchBucket"):
                logger.info(f"Creating bucket: {self.bucket}")
                self.client.create_bucket(Bucket=self.bucket)
            else:
                logger.error(f"Unable to access bucket {self.bucket}: {e}")
                raise

    def upload_file(self, file_data: BinaryIO, object_name: str, content_type: str) -> str:
        """Upload file to MinIO.

        Args:
            file_data: File data to upload
            object_name: S3 object name (path)
            content_type: MIME type of the file

        Returns:
            str: Object URL (endpoint/bucket/object; no scheme prefix —
            matches existing callers' expectations)

        Raises:
            Exception: If upload fails
        """
        try:
            self.client.upload_fileobj(
                file_data,
                self.bucket,
                object_name,
                ExtraArgs={"ContentType": content_type},
            )
            return f"{settings.MINIO_ENDPOINT}/{self.bucket}/{object_name}"
        except ClientError as e:
            logger.error(f"Failed to upload file {object_name}: {e}")
            raise

    def download_file(self, object_name: str) -> BytesIO:
        """Download file from MinIO.

        Args:
            object_name: S3 object name (path)

        Returns:
            BytesIO: File data, positioned at the start

        Raises:
            Exception: If download fails
        """
        try:
            file_data = BytesIO()
            self.client.download_fileobj(self.bucket, object_name, file_data)
            file_data.seek(0)  # rewind so callers can read from the top
            return file_data
        except ClientError as e:
            logger.error(f"Failed to download file {object_name}: {e}")
            raise

    def get_object(self, object_name: str) -> bytes | None:
        """Get object as bytes from MinIO.

        Args:
            object_name: S3 object name (path)

        Returns:
            bytes: File data or None if not found

        Raises:
            Exception: If download fails for reasons other than not found
        """
        try:
            file_data = self.download_file(object_name)
            return file_data.read()
        except ClientError as e:
            # Missing objects are an expected case; everything else bubbles up.
            if e.response["Error"]["Code"] == "404":
                return None
            logger.error(f"Failed to get object {object_name}: {e}")
            raise

    def delete_file(self, object_name: str) -> None:
        """Delete file from MinIO.

        Args:
            object_name: S3 object name (path)

        Raises:
            Exception: If deletion fails
        """
        try:
            self.client.delete_object(Bucket=self.bucket, Key=object_name)
        except ClientError as e:
            logger.error(f"Failed to delete file {object_name}: {e}")
            raise

    def file_exists(self, object_name: str) -> bool:
        """Check if file exists in MinIO.

        Args:
            object_name: S3 object name (path)

        Returns:
            bool: True if file exists, False otherwise
            (any ClientError — including permission errors — reads as False)
        """
        try:
            self.client.head_object(Bucket=self.bucket, Key=object_name)
            return True
        except ClientError:
            return False
|
||||
|
||||
|
||||
# Global storage client instance
# NOTE(review): instantiated at import time, which performs network I/O
# (head_bucket / create_bucket) — consider lazy initialization for
# testability and faster imports.
storage_client = StorageClient()
|
||||
|
||||
|
||||
def get_storage_client() -> StorageClient:
    """Get the global storage client instance.

    Indirection point so callers (and tests) can obtain/override the
    client without importing the module-level global directly.
    """
    return storage_client
|
||||
|
||||
|
||||
# Compatibility methods for MinIO-style API
|
||||
def put_object(bucket_name: str, object_name: str, data: BinaryIO, length: int, content_type: str):
    """MinIO-compatible put_object shim over the global client.

    ``bucket_name`` and ``length`` are accepted for signature compatibility
    but ignored; the client's configured bucket is always used.

    Consistency fix: the upload result (object URL) is now returned,
    matching the behavior of ``StorageClient.put_object``.
    """
    return storage_client.upload_file(data, object_name, content_type)
|
||||
|
||||
|
||||
def remove_object(bucket_name: str, object_name: str):
    """MinIO-compatible remove_object shim over the global client.

    ``bucket_name`` is accepted for signature compatibility but ignored;
    the client's configured bucket is always used.
    """
    storage_client.delete_file(object_name)
|
||||
44
backend/app/core/tasks.py
Normal file
44
backend/app/core/tasks.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Background task utilities for long-running operations."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
|
||||
|
||||
class BackgroundTasks:
    """Simple background task manager built on asyncio.

    In production, consider Celery or similar for distributed tasks.
    """

    # Bug fix: asyncio keeps only a weak reference to tasks created with
    # create_task(); an otherwise-unreferenced task can be garbage-collected
    # before (or while) it runs. Hold strong references here and let each
    # task remove itself on completion.
    _tasks: set = set()

    @staticmethod
    async def run_in_background(func: Callable, *args, **kwargs):
        """
        Schedule ``func(*args, **kwargs)`` as an asyncio task.

        Args:
            func: Coroutine function to run (``func(*args, **kwargs)``
                must return an awaitable)
            *args: Positional arguments
            **kwargs: Keyword arguments

        Returns:
            The created :class:`asyncio.Task`. (Previously ``None``;
            returning the task is backward compatible and lets callers
            optionally await or cancel it.)
        """
        task = asyncio.create_task(func(*args, **kwargs))
        BackgroundTasks._tasks.add(task)
        task.add_done_callback(BackgroundTasks._tasks.discard)
        return task
|
||||
|
||||
|
||||
async def generate_thumbnails_task(image_id: str, storage_path: str, contents: bytes):
    """
    Background task to generate thumbnails.

    Args:
        image_id: Image ID (stringified UUID)
        storage_path: Original image storage path
        contents: Image file contents

    Bug fix: ``generate_thumbnails`` is a synchronous function; calling it
    directly inside an async task blocked the event loop for the duration
    of the image processing. It now runs in a worker thread. The dead
    trailing ``pass`` was removed.
    """
    # Local imports as in the original: avoids an import cycle with the
    # images package at module load time.
    from uuid import UUID

    from app.images.processing import generate_thumbnails

    # Run the blocking thumbnail generation off the event loop.
    await asyncio.to_thread(generate_thumbnails, UUID(image_id), storage_path, contents)

    # NOTE(review): thumbnail paths are not persisted to the image metadata
    # here — that requires database access (see original TODO comment).
|
||||
1
backend/app/database/__init__.py
Normal file
1
backend/app/database/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Database models and session management."""
|
||||
29
backend/app/database/base.py
Normal file
29
backend/app/database/base.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Base model for all database models."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from sqlalchemy import Column, DateTime
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.orm import DeclarativeBase, declared_attr
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Base class for all database models."""

    # Generate __tablename__ automatically from class name.
    # NOTE: declared_attr.directive passes the *class* here despite the
    # parameter being named ``self``; models in this package override
    # __tablename__ explicitly anyway.
    @declared_attr.directive
    def __tablename__(self) -> str:
        """Generate table name from class name."""
        # Convert CamelCase to snake_case (e.g. "BoardImage" -> "board_image")
        name = self.__name__
        return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")

    # Common columns for all models
    # NOTE(review): datetime.utcnow is deprecated since Python 3.12;
    # consider a timezone-aware default (datetime.now(UTC)) — confirm
    # column types and existing data first.
    id: Any = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
    created_at: Any = Column(DateTime, default=datetime.utcnow, nullable=False)

    def dict(self) -> dict[str, Any]:
        """Convert model to dictionary (one entry per mapped column)."""
        # NOTE: method name shadows the builtin ``dict``; kept for
        # interface compatibility with existing callers.
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}
|
||||
19
backend/app/database/models/__init__.py
Normal file
19
backend/app/database/models/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Database models."""
|
||||
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.comment import Comment
|
||||
from app.database.models.group import Group
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.share_link import ShareLink
|
||||
from app.database.models.user import User
|
||||
|
||||
__all__ = [
|
||||
"User",
|
||||
"Board",
|
||||
"Image",
|
||||
"BoardImage",
|
||||
"Group",
|
||||
"ShareLink",
|
||||
"Comment",
|
||||
]
|
||||
64
backend/app/database/models/board.py
Normal file
64
backend/app/database/models/board.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Board database model."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from sqlalchemy import Boolean, DateTime, ForeignKey, String, Text
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.dialects.postgresql import UUID as PGUUID
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.comment import Comment
|
||||
from app.database.models.group import Group
|
||||
from app.database.models.share_link import ShareLink
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
class Board(Base):
    """
    Board model representing a reference board (canvas) containing images.

    A board is owned by a user and contains images arranged on an infinite canvas
    with a specific viewport state (zoom, pan, rotation).
    """

    __tablename__ = "boards"

    # Primary key and owning user; boards cascade-delete with their user.
    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    user_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False
    )
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)

    # Canvas viewport (pan x/y, zoom factor, rotation); defaults to the
    # origin at 100% zoom. Stored as JSONB.
    viewport_state: Mapped[dict] = mapped_column(
        JSONB,
        nullable=False,
        default=lambda: {"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )

    # NOTE(review): datetime.utcnow is deprecated since Python 3.12;
    # consider timezone-aware defaults project-wide — confirm first.
    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow
    )
    # Soft-delete flag; ownership checks elsewhere filter on it.
    is_deleted: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)

    # Relationships (children cascade-delete with the board)
    user: Mapped["User"] = relationship("User", back_populates="boards")
    board_images: Mapped[list["BoardImage"]] = relationship(
        "BoardImage", back_populates="board", cascade="all, delete-orphan"
    )
    groups: Mapped[list["Group"]] = relationship("Group", back_populates="board", cascade="all, delete-orphan")
    share_links: Mapped[list["ShareLink"]] = relationship(
        "ShareLink", back_populates="board", cascade="all, delete-orphan"
    )
    comments: Mapped[list["Comment"]] = relationship("Comment", back_populates="board", cascade="all, delete-orphan")

    def __repr__(self) -> str:
        """String representation of Board."""
        return f"<Board(id={self.id}, title='{self.title}', user_id={self.user_id})>"
|
||||
67
backend/app/database/models/board_image.py
Normal file
67
backend/app/database/models/board_image.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""BoardImage database model - junction table for boards and images."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Integer
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.dialects.postgresql import UUID as PGUUID
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.group import Group
|
||||
from app.database.models.image import Image
|
||||
|
||||
|
||||
class BoardImage(Base):
    """
    BoardImage model - junction table connecting boards and images.

    Stores position, transformations, and z-order for each image on a board.
    """

    __tablename__ = "board_images"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    # Row disappears if either the board or the underlying image is deleted.
    board_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), ForeignKey("boards.id", ondelete="CASCADE"), nullable=False
    )
    image_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), ForeignKey("images.id", ondelete="CASCADE"), nullable=False
    )

    # Canvas placement; no default — callers must supply a position.
    position: Mapped[dict] = mapped_column(JSONB, nullable=False)
    # Per-placement visual transforms with identity defaults.
    transformations: Mapped[dict] = mapped_column(
        JSONB,
        nullable=False,
        default=lambda: {
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
    )
    # Stacking order on the canvas (higher draws on top — presumably; confirm in renderer).
    z_order: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
    # Optional group membership; deleting the group just ungroups the image.
    group_id: Mapped[UUID | None] = mapped_column(
        PGUUID(as_uuid=True), ForeignKey("groups.id", ondelete="SET NULL"), nullable=True
    )

    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow
    )

    # Relationships
    board: Mapped["Board"] = relationship("Board", back_populates="board_images")
    image: Mapped["Image"] = relationship("Image", back_populates="board_images")
    group: Mapped["Group | None"] = relationship("Group", back_populates="board_images")

    def __repr__(self) -> str:
        """String representation of BoardImage."""
        return f"<BoardImage(id={self.id}, board_id={self.board_id}, image_id={self.image_id})>"
|
||||
32
backend/app/database/models/comment.py
Normal file
32
backend/app/database/models/comment.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""Comment model for board annotations."""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String, Text
|
||||
from sqlalchemy.dialects.postgresql import JSONB, UUID
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
|
||||
class Comment(Base):
    """Comment model representing viewer comments on shared boards."""

    __tablename__ = "comments"

    # NOTE(review): uses legacy Column-style mapping while Board/Group/Image
    # use 2.0 ``Mapped[...]`` annotations — consider unifying styles.
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    board_id = Column(UUID(as_uuid=True), ForeignKey("boards.id", ondelete="CASCADE"), nullable=False)
    # The share link through which the comment was made; preserved as NULL
    # if that share link is later deleted.
    share_link_id = Column(UUID(as_uuid=True), ForeignKey("share_links.id", ondelete="SET NULL"), nullable=True)
    author_name = Column(String(100), nullable=False)
    content = Column(Text, nullable=False)
    position = Column(JSONB, nullable=True)  # Optional canvas position reference
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
    is_deleted = Column(Boolean, nullable=False, default=False)  # soft-delete flag

    # Relationships
    board = relationship("Board", back_populates="comments")
    share_link = relationship("ShareLink", back_populates="comments")

    def __repr__(self) -> str:
        """Debug representation including board and author."""
        return f"<Comment(id={self.id}, board_id={self.board_id}, author={self.author_name})>"
|
||||
47
backend/app/database/models/group.py
Normal file
47
backend/app/database/models/group.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Group database model."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, String, Text
|
||||
from sqlalchemy.dialects.postgresql import UUID as PGUUID
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
|
||||
|
||||
class Group(Base):
    """
    Group model for organizing images with labels and annotations.

    Groups contain multiple images that can be moved together and have
    shared visual indicators (color, annotation text).
    """

    __tablename__ = "groups"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    # Groups are scoped to, and cascade-delete with, their board.
    board_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), ForeignKey("boards.id", ondelete="CASCADE"), nullable=False
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    color: Mapped[str] = mapped_column(String(7), nullable=False)  # Hex color #RRGGBB
    annotation: Mapped[str | None] = mapped_column(Text, nullable=True)

    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow
    )

    # Relationships (BoardImage.group_id is SET NULL on group deletion)
    board: Mapped["Board"] = relationship("Board", back_populates="groups")
    board_images: Mapped[list["BoardImage"]] = relationship("BoardImage", back_populates="group")

    def __repr__(self) -> str:
        """String representation of Group."""
        return f"<Group(id={self.id}, name='{self.name}', board_id={self.board_id})>"
|
||||
52
backend/app/database/models/image.py
Normal file
52
backend/app/database/models/image.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""Image database model."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, String
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.dialects.postgresql import UUID as PGUUID
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
class Image(Base):
    """
    Image model representing uploaded image files.

    Images are stored in MinIO and can be reused across multiple boards.
    Reference counting tracks how many boards use each image.
    """

    __tablename__ = "images"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    user_id: Mapped[UUID] = mapped_column(
        PGUUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False
    )
    filename: Mapped[str] = mapped_column(String(255), nullable=False)
    storage_path: Mapped[str] = mapped_column(String(512), nullable=False)
    file_size: Mapped[int] = mapped_column(BigInteger, nullable=False)  # bytes
    mime_type: Mapped[str] = mapped_column(String(100), nullable=False)
    width: Mapped[int] = mapped_column(Integer, nullable=False)
    height: Mapped[int] = mapped_column(Integer, nullable=False)
    # Bug fix: an attribute named ``metadata`` is reserved on SQLAlchemy
    # declarative classes (it collides with ``Base.metadata`` / MetaData)
    # and raises InvalidRequestError at class-definition time. The Python
    # attribute is renamed; the underlying DB column keeps the name
    # "metadata" so the schema is unchanged.
    image_metadata: Mapped[dict] = mapped_column("metadata", JSONB, nullable=False)

    created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=datetime.utcnow)
    # Number of boards referencing this image (see class docstring).
    reference_count: Mapped[int] = mapped_column(Integer, nullable=False, default=0)

    # Relationships
    user: Mapped["User"] = relationship("User", back_populates="images")
    board_images: Mapped[list["BoardImage"]] = relationship(
        "BoardImage", back_populates="image", cascade="all, delete-orphan"
    )

    def __repr__(self) -> str:
        """String representation of Image."""
        return f"<Image(id={self.id}, filename='{self.filename}', user_id={self.user_id})>"
|
||||
33
backend/app/database/models/share_link.py
Normal file
33
backend/app/database/models/share_link.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""ShareLink model for board sharing functionality."""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
|
||||
class ShareLink(Base):
    """ShareLink model representing shareable board links with permissions."""

    __tablename__ = "share_links"

    # Surrogate primary key, generated client-side.
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Owning board; links are removed by the DB when the board is deleted (ondelete CASCADE).
    board_id = Column(UUID(as_uuid=True), ForeignKey("boards.id", ondelete="CASCADE"), nullable=False)
    # Opaque URL token; unique and indexed because lookups happen by token, not id.
    token = Column(String(64), unique=True, nullable=False, index=True)
    permission_level = Column(String(20), nullable=False)  # 'view-only' or 'view-comment'
    # NOTE(review): datetime.utcnow stores naive timestamps; confirm callers treat them as UTC.
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
    expires_at = Column(DateTime, nullable=True)  # NULL = no expiry
    last_accessed_at = Column(DateTime, nullable=True)  # NULL until first access
    # Running count of accesses through this link.
    access_count = Column(Integer, nullable=False, default=0)
    # Soft-revocation flag: the row is kept rather than deleted.
    is_revoked = Column(Boolean, nullable=False, default=False)

    # Relationships
    board = relationship("Board", back_populates="share_links")
    # Comments made through this link die with the link (ORM-level cascade).
    comments = relationship("Comment", back_populates="share_link", cascade="all, delete-orphan")

    def __repr__(self) -> str:
        """Debug representation with id, board and permission level."""
        return f"<ShareLink(id={self.id}, board_id={self.board_id}, permission={self.permission_level})>"
|
||||
30
backend/app/database/models/user.py
Normal file
30
backend/app/database/models/user.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""User model for authentication and ownership."""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, String
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from app.database.base import Base
|
||||
|
||||
|
||||
class User(Base):
    """User model representing registered users."""

    __tablename__ = "users"

    # Surrogate primary key, generated client-side.
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Login identifier; unique and indexed for authentication lookups.
    email = Column(String(255), unique=True, nullable=False, index=True)
    # Stored credential hash (never the plaintext password).
    password_hash = Column(String(255), nullable=False)
    # NOTE(review): datetime.utcnow stores naive timestamps; confirm callers treat them as UTC.
    created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
    # Refreshed automatically on every UPDATE via onupdate.
    updated_at = Column(DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Soft-disable flag: deactivated accounts keep their row.
    is_active = Column(Boolean, nullable=False, default=True)

    # Relationships (ORM-level cascade: deleting a user deletes their boards and images)
    boards = relationship("Board", back_populates="user", cascade="all, delete-orphan")
    images = relationship("Image", back_populates="user", cascade="all, delete-orphan")

    def __repr__(self) -> str:
        """Debug representation with id and email."""
        return f"<User(id={self.id}, email={self.email})>"
|
||||
27
backend/app/database/session.py
Normal file
27
backend/app/database/session.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Database session management."""
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
# Create SQLAlchemy engine
|
||||
engine = create_engine(
    str(settings.DATABASE_URL),
    pool_size=settings.DATABASE_POOL_SIZE,
    max_overflow=settings.DATABASE_MAX_OVERFLOW,
    pool_pre_ping=True,  # Verify connections before using (issues a cheap ping; avoids stale-connection errors)
    echo=settings.DEBUG,  # Log SQL queries in debug mode
)

# Create session factory.
# autocommit/autoflush disabled: transactions are controlled explicitly by callers.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
|
||||
def get_db():
    """FastAPI-style dependency that yields a database session.

    Yields a fresh session from ``SessionLocal`` and guarantees it is
    closed when the caller is done, even if an exception occurs.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
1
backend/app/images/__init__.py
Normal file
1
backend/app/images/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Image upload and processing package."""
|
||||
62
backend/app/images/download.py
Normal file
62
backend/app/images/download.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""Image download functionality."""
|
||||
|
||||
import io
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from app.core.storage import storage_client
|
||||
|
||||
|
||||
async def download_single_image(storage_path: str, filename: str) -> StreamingResponse:
    """
    Download a single image from storage.

    Args:
        storage_path: Path to image in MinIO
        filename: Original filename for download

    Returns:
        StreamingResponse with image data

    Raises:
        HTTPException: 404 if the image is missing, 500 if the download fails
    """
    try:
        # Get image from storage
        image_data = storage_client.get_object(storage_path)

        if image_data is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Image not found in storage",
            )

        # Determine content type from file extension
        extension = Path(filename).suffix.lower()
        content_type_map = {
            ".jpg": "image/jpeg",
            ".jpeg": "image/jpeg",
            ".png": "image/png",
            ".gif": "image/gif",
            ".webp": "image/webp",
            ".svg": "image/svg+xml",
        }
        content_type = content_type_map.get(extension, "application/octet-stream")

        # Return streaming response
        return StreamingResponse(
            io.BytesIO(image_data),
            media_type=content_type,
            headers={
                # BUGFIX: the download name was a hard-coded literal; interpolate
                # the caller-supplied filename so the saved file is named correctly.
                "Content-Disposition": f'attachment; filename="{filename}"',
                "Cache-Control": "no-cache",
            },
        )

    except HTTPException:
        # BUGFIX: re-raise our own 404 unchanged instead of letting the broad
        # handler below rewrap it as a 500.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to download image: {str(e)}",
        ) from e
|
||||
228
backend/app/images/export_composite.py
Normal file
228
backend/app/images/export_composite.py
Normal file
@@ -0,0 +1,228 @@
|
||||
"""Composite image generation for board export."""
|
||||
|
||||
import io
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from fastapi.responses import StreamingResponse
|
||||
from PIL import Image as PILImage
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.core.storage import storage_client
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
|
||||
|
||||
def create_composite_export(board_id: str, db: Session, scale: float = 1.0, format: str = "PNG") -> StreamingResponse:
    """
    Create a composite image showing the entire board layout.

    Flattens every image placed on the board (in z-order, bottom first) onto a
    single canvas sized to the images' bounding box plus padding, and streams
    the result back as a download.

    Args:
        board_id: Board UUID
        db: Database session
        scale: Resolution multiplier (1x, 2x, 4x)
        format: Output format (PNG or JPEG)

    Returns:
        StreamingResponse with composite image

    Raises:
        HTTPException: 404 if the board or its images are missing, 400 if the
            canvas cannot be computed or exceeds the size limit, 500 otherwise.
    """
    try:
        # Get board
        board = db.query(Board).filter(Board.id == board_id).first()
        if not board:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Board not found",
            )

        # Get all images for the board with positions.
        # Ordered by z_order so later pastes land on top of earlier ones.
        board_images = (
            db.query(BoardImage, Image)
            .join(Image, BoardImage.image_id == Image.id)
            .filter(BoardImage.board_id == board_id)
            .order_by(BoardImage.z_order)
            .all()
        )

        if not board_images:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="No images found for this board",
            )

        # Calculate canvas bounds.
        # NOTE(review): bounds are computed from position + scaled width/height
        # only, while rotated images below use expand=True — a rotated image can
        # exceed these bounds and get clipped. Confirm whether that is acceptable.
        bounds = _calculate_canvas_bounds(board_images)
        if not bounds:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Unable to calculate canvas bounds",
            )

        min_x, min_y, max_x, max_y = bounds

        # Calculate canvas size with padding
        padding = 50
        canvas_width = int((max_x - min_x + 2 * padding) * scale)
        canvas_height = int((max_y - min_y + 2 * padding) * scale)

        # Limit canvas size to prevent memory issues
        max_dimension = 8192  # 8K resolution limit
        if canvas_width > max_dimension or canvas_height > max_dimension:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Composite image too large (max {max_dimension}x{max_dimension})",
            )

        # Create blank canvas (white; JPEG has no alpha channel so use RGB there)
        if format.upper() == "JPEG":
            canvas = PILImage.new("RGB", (canvas_width, canvas_height), color=(255, 255, 255))
        else:
            canvas = PILImage.new("RGBA", (canvas_width, canvas_height), color=(255, 255, 255, 255))

        # Composite each image onto canvas
        for board_image, image in board_images:
            try:
                # Get image from storage
                image_data = storage_client.get_object(image.storage_path)
                if not image_data:
                    continue

                # Open image
                pil_image = PILImage.open(io.BytesIO(image_data))

                # Apply transformations (scale, rotation, flips, greyscale, opacity)
                transformed_image = _apply_transformations(pil_image, board_image.transformations, scale)

                # Calculate position on canvas (shifted so min corner maps to the padding edge)
                pos = board_image.position
                x = int((pos["x"] - min_x + padding) * scale)
                y = int((pos["y"] - min_y + padding) * scale)

                # Paste onto canvas; RGBA images use themselves as the alpha mask
                if transformed_image.mode == "RGBA":
                    canvas.paste(transformed_image, (x, y), transformed_image)
                else:
                    canvas.paste(transformed_image, (x, y))

            except Exception as e:
                # Best-effort: log error but continue with other images
                print(f"Warning: Failed to composite {image.filename}: {str(e)}")
                continue

        # Save to buffer
        output = io.BytesIO()
        if format.upper() == "JPEG":
            canvas = canvas.convert("RGB")
            canvas.save(output, format="JPEG", quality=95)
            media_type = "image/jpeg"
            extension = "jpg"
        else:
            canvas.save(output, format="PNG", optimize=True)
            media_type = "image/png"
            extension = "png"

        output.seek(0)

        # Return composite image
        return StreamingResponse(
            output,
            media_type=media_type,
            headers={
                "Content-Disposition": f'attachment; filename="board_composite.{extension}"',
                "Cache-Control": "no-cache",
            },
        )

    except HTTPException:
        # Propagate deliberate 4xx responses unchanged.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create composite export: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
def _calculate_canvas_bounds(board_images) -> tuple[float, float, float, float] | None:
    """
    Compute the axis-aligned bounding box covering every placed image.

    Each image contributes a rectangle anchored at its board position and
    sized by its pixel dimensions times its stored scale factor.

    Args:
        board_images: List of (BoardImage, Image) tuples

    Returns:
        Tuple of (min_x, min_y, max_x, max_y) or None when the list is empty
    """
    if not board_images:
        return None

    lefts: list[float] = []
    tops: list[float] = []
    rights: list[float] = []
    bottoms: list[float] = []

    for placement, img in board_images:
        origin = placement.position
        factor = placement.transformations.get("scale", 1.0)

        lefts.append(origin["x"])
        tops.append(origin["y"])
        rights.append(origin["x"] + img.width * factor)
        bottoms.append(origin["y"] + img.height * factor)

    return (min(lefts), min(tops), max(rights), max(bottoms))
|
||||
|
||||
|
||||
def _apply_transformations(image: PILImage.Image, transformations: dict, scale: float) -> PILImage.Image:
    """
    Apply the stored board transformations to a PIL image.

    Operations are applied in a fixed order: scale, rotation, flips,
    greyscale, opacity.

    Args:
        image: PIL Image
        transformations: Transformation dict (scale, rotation, flipped_h,
            flipped_v, greyscale, opacity)
        scale: Resolution multiplier for the export

    Returns:
        Transformed PIL Image
    """
    # Combined scale: per-image scale times export resolution multiplier.
    effective_scale = transformations.get("scale", 1.0) * scale
    if effective_scale != 1.0:
        target_size = (
            int(image.width * effective_scale),
            int(image.height * effective_scale),
        )
        image = image.resize(target_size, PILImage.Resampling.LANCZOS)

    # Rotation; negated so positive stored angles rotate clockwise
    # (PIL's rotate() is counter-clockwise). expand=True keeps corners.
    angle = transformations.get("rotation", 0)
    if angle != 0:
        image = image.rotate(-angle, expand=True, resample=PILImage.Resampling.BICUBIC)

    # Mirroring.
    if transformations.get("flipped_h", False):
        image = image.transpose(PILImage.Transpose.FLIP_LEFT_RIGHT)
    if transformations.get("flipped_v", False):
        image = image.transpose(PILImage.Transpose.FLIP_TOP_BOTTOM)

    # Greyscale conversion, keeping transparency when present.
    if transformations.get("greyscale", False):
        if image.mode == "RGBA":
            alpha_channel = image.split()[-1]
            image = image.convert("L").convert("RGBA")
            image.putalpha(alpha_channel)
        else:
            image = image.convert("L")

    # Opacity, applied by scaling the alpha channel (only for modes with alpha).
    alpha_factor = transformations.get("opacity", 1.0)
    if alpha_factor < 1.0 and image.mode in ("RGBA", "LA"):
        alpha_channel = image.split()[-1]
        image.putalpha(alpha_channel.point(lambda p: int(p * alpha_factor)))

    return image
|
||||
103
backend/app/images/export_zip.py
Normal file
103
backend/app/images/export_zip.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""ZIP export functionality for multiple images."""
|
||||
|
||||
import io
|
||||
import zipfile
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from fastapi.responses import StreamingResponse
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.core.storage import storage_client
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
|
||||
|
||||
def create_zip_export(board_id: str, db: Session) -> StreamingResponse:
    """
    Create a ZIP file containing all images from a board.

    The archive is built entirely in memory; individual image failures are
    skipped (best-effort) rather than failing the whole export.

    Args:
        board_id: Board UUID
        db: Database session

    Returns:
        StreamingResponse with ZIP file

    Raises:
        HTTPException: 404 if the board has no images, 500 if the export fails
    """
    try:
        # Get all images for the board
        board_images = (
            db.query(BoardImage, Image)
            .join(Image, BoardImage.image_id == Image.id)
            .filter(BoardImage.board_id == board_id)
            .all()
        )

        if not board_images:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="No images found for this board",
            )

        # Create ZIP file in memory
        zip_buffer = io.BytesIO()

        with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
            for _board_image, image in board_images:
                try:
                    # Get image data from storage
                    image_data = storage_client.get_object(image.storage_path)

                    if image_data:
                        # Add to ZIP with sanitized filename.
                        # NOTE(review): sanitization can make two distinct
                        # filenames collide, producing duplicate entries in
                        # the archive — confirm whether that matters.
                        safe_filename = _sanitize_filename(image.filename)
                        zip_file.writestr(safe_filename, image_data)

                except Exception as e:
                    # Best-effort: log error but continue with other images
                    print(f"Warning: Failed to add {image.filename} to ZIP: {str(e)}")
                    continue

        # Reset buffer position before streaming
        zip_buffer.seek(0)

        # Return ZIP file
        return StreamingResponse(
            zip_buffer,
            media_type="application/zip",
            headers={
                "Content-Disposition": 'attachment; filename="board_export.zip"',
                "Cache-Control": "no-cache",
            },
        )

    except HTTPException:
        # Propagate the deliberate 404 unchanged.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create ZIP export: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
def _sanitize_filename(filename: str) -> str:
    """
    Sanitize filename for safe inclusion in ZIP.

    Replaces path separators and any character outside a conservative
    allow-list with underscores, and prefixes names that would otherwise be
    empty or hidden (dot-leading).

    Args:
        filename: Original filename

    Returns:
        Sanitized filename
    """
    allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._- "
    cleaned = "".join(ch if ch in allowed else "_" for ch in filename)

    # Guard against empty results and hidden (dot-prefixed) names.
    if not cleaned or cleaned.startswith("."):
        cleaned = "file_" + cleaned

    return cleaned
|
||||
98
backend/app/images/processing.py
Normal file
98
backend/app/images/processing.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""Image processing utilities - thumbnail generation."""
|
||||
|
||||
import contextlib
|
||||
import io
|
||||
from uuid import UUID
|
||||
|
||||
from PIL import Image as PILImage
|
||||
|
||||
from app.core.storage import get_storage_client
|
||||
|
||||
# Thumbnail sizes (width in pixels, height proportional)
|
||||
# Maximum thumbnail widths in pixels, keyed by quality tier; height is
# computed proportionally when resizing.
THUMBNAIL_SIZES = {
    "low": 800,  # For slow connections
    "medium": 1600,  # For medium connections
    "high": 3200,  # For fast connections
}
|
||||
|
||||
|
||||
def generate_thumbnails(image_id: UUID, original_path: str, contents: bytes) -> dict[str, str]:
    """
    Generate thumbnails at different resolutions.

    Produces one WebP thumbnail per THUMBNAIL_SIZES tier and uploads each to
    MinIO. Tiers whose target width is not smaller than the original simply
    map to the original's storage path (which may not be WebP).

    Args:
        image_id: Image ID for naming thumbnails
        original_path: Path to original image
        contents: Original image contents

    Returns:
        Dictionary mapping quality level to thumbnail storage path
    """
    storage = get_storage_client()
    thumbnail_paths = {}

    # Load original image
    image = PILImage.open(io.BytesIO(contents))

    # Convert to RGB if necessary (for JPEG compatibility).
    # Transparent pixels are flattened onto a white background.
    if image.mode in ("RGBA", "LA", "P"):
        # Create white background for transparent images
        background = PILImage.new("RGB", image.size, (255, 255, 255))
        if image.mode == "P":
            image = image.convert("RGBA")
        background.paste(image, mask=image.split()[-1] if image.mode in ("RGBA", "LA") else None)
        image = background
    elif image.mode != "RGB":
        image = image.convert("RGB")

    # Get original dimensions
    orig_width, orig_height = image.size

    # Generate thumbnails for each size
    for quality, max_width in THUMBNAIL_SIZES.items():
        # Skip if original is smaller than thumbnail size; reuse the original path.
        if orig_width <= max_width:
            thumbnail_paths[quality] = original_path
            continue

        # Calculate proportional height
        ratio = max_width / orig_width
        new_height = int(orig_height * ratio)

        # Resize image
        thumbnail = image.resize((max_width, new_height), PILImage.Resampling.LANCZOS)

        # Convert to WebP for better compression
        output = io.BytesIO()
        thumbnail.save(output, format="WEBP", quality=85, method=6)
        output.seek(0)

        # Generate storage path
        thumbnail_path = f"thumbnails/{quality}/{image_id}.webp"

        # Upload to MinIO.
        # NOTE(review): bucket name "webref" is hard-coded here; confirm it
        # should not come from settings.
        storage.put_object(
            bucket_name="webref",
            object_name=thumbnail_path,
            data=output,
            length=len(output.getvalue()),
            content_type="image/webp",
        )

        thumbnail_paths[quality] = thumbnail_path

    return thumbnail_paths
|
||||
|
||||
|
||||
async def delete_thumbnails(thumbnail_paths: dict[str, str]) -> None:
    """
    Best-effort removal of generated thumbnails from object storage.

    Args:
        thumbnail_paths: Dictionary of quality -> path
    """
    storage = get_storage_client()
    for object_path in thumbnail_paths.values():
        try:
            storage.remove_object(bucket_name="webref", object_name=object_path)
        except Exception:
            # A leftover thumbnail is not fatal; keep deleting the rest.
            continue
|
||||
223
backend/app/images/repository.py
Normal file
223
backend/app/images/repository.py
Normal file
@@ -0,0 +1,223 @@
|
||||
"""Image repository for database operations."""
|
||||
|
||||
from collections.abc import Sequence
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
|
||||
|
||||
class ImageRepository:
    """Repository for image database operations.

    Wraps all Image / BoardImage persistence, including the reference-count
    bookkeeping that tracks how many boards use each image.
    """

    def __init__(self, db: AsyncSession):
        """Initialize repository with database session."""
        self.db = db

    async def create_image(
        self,
        user_id: UUID,
        filename: str,
        storage_path: str,
        file_size: int,
        mime_type: str,
        width: int,
        height: int,
        metadata: dict,
    ) -> Image:
        """
        Create new image record.

        Args:
            user_id: Owner user ID
            filename: Original filename
            storage_path: Path in MinIO
            file_size: File size in bytes
            mime_type: MIME type
            width: Image width in pixels
            height: Image height in pixels
            metadata: Additional metadata (format, checksum, thumbnails, etc)

        Returns:
            Created Image instance
        """
        image = Image(
            user_id=user_id,
            filename=filename,
            storage_path=storage_path,
            file_size=file_size,
            mime_type=mime_type,
            width=width,
            height=height,
            # BUGFIX: the ORM attribute is `image_metadata` (the name the search
            # module queries) — `metadata` is reserved by SQLAlchemy's Declarative
            # base, so Image(metadata=...) could never have worked.
            image_metadata=metadata,
        )
        self.db.add(image)
        await self.db.commit()
        await self.db.refresh(image)
        return image

    async def get_image_by_id(self, image_id: UUID) -> Image | None:
        """
        Get image by ID.

        Args:
            image_id: Image ID

        Returns:
            Image instance or None
        """
        result = await self.db.execute(select(Image).where(Image.id == image_id))
        return result.scalar_one_or_none()

    async def get_user_images(self, user_id: UUID, limit: int = 50, offset: int = 0) -> tuple[Sequence[Image], int]:
        """
        Get all images for a user with pagination.

        Args:
            user_id: User ID
            limit: Maximum number of images to return
            offset: Number of images to skip

        Returns:
            Tuple of (images, total_count)
        """
        # Local import keeps this fix self-contained within the method.
        from sqlalchemy import func

        # PERF FIX: count with SQL COUNT() instead of materializing every row
        # just to call len() on the result.
        count_result = await self.db.execute(
            select(func.count()).select_from(Image).where(Image.user_id == user_id)
        )
        total = count_result.scalar_one()

        # Get paginated results, newest first
        result = await self.db.execute(
            select(Image).where(Image.user_id == user_id).order_by(Image.created_at.desc()).limit(limit).offset(offset)
        )
        images = result.scalars().all()

        return images, total

    async def delete_image(self, image_id: UUID) -> bool:
        """
        Delete image record.

        Args:
            image_id: Image ID

        Returns:
            True if deleted, False if not found
        """
        image = await self.get_image_by_id(image_id)
        if not image:
            return False

        await self.db.delete(image)
        await self.db.commit()
        return True

    async def increment_reference_count(self, image_id: UUID) -> None:
        """
        Increment reference count for image.

        No-op when the image does not exist. Commits immediately.

        Args:
            image_id: Image ID
        """
        image = await self.get_image_by_id(image_id)
        if image:
            image.reference_count += 1
            await self.db.commit()

    async def decrement_reference_count(self, image_id: UUID) -> int:
        """
        Decrement reference count for image.

        Never goes below zero; commits immediately.

        Args:
            image_id: Image ID

        Returns:
            New reference count (0 when the image is missing or already at 0)
        """
        image = await self.get_image_by_id(image_id)
        if image and image.reference_count > 0:
            image.reference_count -= 1
            await self.db.commit()
            return image.reference_count
        return 0

    async def add_image_to_board(
        self,
        board_id: UUID,
        image_id: UUID,
        position: dict,
        transformations: dict,
        z_order: int = 0,
    ) -> BoardImage:
        """
        Add image to board.

        Args:
            board_id: Board ID
            image_id: Image ID
            position: Canvas position {x, y}
            transformations: Image transformations
            z_order: Layer order

        Returns:
            Created BoardImage instance
        """
        board_image = BoardImage(
            board_id=board_id,
            image_id=image_id,
            position=position,
            transformations=transformations,
            z_order=z_order,
        )
        self.db.add(board_image)

        # Increment reference count.
        # NOTE: this helper commits internally, which also flushes the pending
        # board_image; the commit below is then effectively a no-op.
        await self.increment_reference_count(image_id)

        await self.db.commit()
        await self.db.refresh(board_image)
        return board_image

    async def get_board_images(self, board_id: UUID) -> Sequence[BoardImage]:
        """
        Get all images for a board, ordered by z-order.

        Args:
            board_id: Board ID

        Returns:
            List of BoardImage instances
        """
        result = await self.db.execute(
            select(BoardImage).where(BoardImage.board_id == board_id).order_by(BoardImage.z_order.asc())
        )
        return result.scalars().all()

    async def remove_image_from_board(self, board_id: UUID, image_id: UUID) -> bool:
        """
        Remove image from board.

        Args:
            board_id: Board ID
            image_id: Image ID

        Returns:
            True if removed, False if not found
        """
        result = await self.db.execute(
            select(BoardImage).where(BoardImage.board_id == board_id, BoardImage.image_id == image_id)
        )
        board_image = result.scalar_one_or_none()

        if not board_image:
            return False

        await self.db.delete(board_image)

        # Decrement reference count (commits internally, flushing the delete).
        await self.decrement_reference_count(image_id)

        await self.db.commit()
        return True
|
||||
154
backend/app/images/schemas.py
Normal file
154
backend/app/images/schemas.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Image schemas for request/response validation."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
class ImageMetadata(BaseModel):
    """Image metadata structure."""

    format: str = Field(..., description="Image format (jpeg, png, etc)")
    checksum: str = Field(..., description="SHA256 checksum of file")
    # Optional: omitted when the source file carries no EXIF block.
    exif: dict[str, Any] | None = Field(None, description="EXIF data if available")
    thumbnails: dict[str, str] = Field(default_factory=dict, description="Thumbnail URLs by quality level")
|
||||
|
||||
|
||||
class ImageUploadResponse(BaseModel):
    """Response after successful image upload."""

    id: UUID
    filename: str
    storage_path: str
    file_size: int  # bytes
    mime_type: str
    width: int  # pixels
    height: int  # pixels
    # NOTE(review): with from_attributes=True this field name must match the
    # ORM attribute it is populated from — verify against the Image model.
    metadata: dict[str, Any]
    created_at: datetime

    class Config:
        """Pydantic config."""

        # NOTE(review): inner Config is pydantic v1 style; v2 prefers
        # model_config = ConfigDict(from_attributes=True). Works but deprecated.
        from_attributes = True
|
||||
|
||||
|
||||
class ImageResponse(BaseModel):
    """Full image response with all fields."""

    id: UUID
    user_id: UUID
    filename: str
    storage_path: str
    file_size: int  # bytes
    mime_type: str
    width: int  # pixels
    height: int  # pixels
    # NOTE(review): with from_attributes=True this field name must match the
    # ORM attribute it is populated from — verify against the Image model.
    metadata: dict[str, Any]
    created_at: datetime
    # Number of boards referencing this image.
    reference_count: int

    class Config:
        """Pydantic config."""

        from_attributes = True
|
||||
|
||||
|
||||
class BoardImageCreate(BaseModel):
    """Schema for adding image to board."""

    image_id: UUID = Field(..., description="ID of uploaded image")
    # Defaults place the image at the canvas origin.
    position: dict[str, float] = Field(default_factory=lambda: {"x": 0, "y": 0}, description="Canvas position")
    # Defaults are the identity transform (no scaling/rotation/flip/etc).
    transformations: dict[str, Any] = Field(
        default_factory=lambda: {
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        description="Image transformations",
    )
    z_order: int = Field(default=0, description="Layer order")

    @field_validator("position")
    @classmethod
    def validate_position(cls, v: dict[str, float]) -> dict[str, float]:
        """Validate position has x and y."""
        if "x" not in v or "y" not in v:
            raise ValueError("Position must contain 'x' and 'y' coordinates")
        return v
|
||||
|
||||
|
||||
class BoardImageUpdate(BaseModel):
    """Schema for updating board image position/transformations.

    All fields are optional: only the supplied ones are updated.
    """

    position: dict[str, float] | None = Field(None, description="Canvas position")
    transformations: dict[str, Any] | None = Field(None, description="Image transformations")
    z_order: int | None = Field(None, description="Layer order")
    group_id: UUID | None = Field(None, description="Group membership")

    @field_validator("position")
    @classmethod
    def validate_position(cls, v: dict[str, float] | None) -> dict[str, float] | None:
        """Validate position has x and y if provided."""
        if v is not None and ("x" not in v or "y" not in v):
            raise ValueError("Position must contain 'x' and 'y' coordinates")
        return v
|
||||
|
||||
|
||||
class BoardImageResponse(BaseModel):
    """Response for board image with all metadata."""

    id: UUID
    board_id: UUID
    image_id: UUID
    position: dict[str, float]
    transformations: dict[str, Any]
    z_order: int
    # None when the image is not part of any group.
    group_id: UUID | None
    created_at: datetime
    updated_at: datetime
    # Embedded full image record.
    image: ImageResponse

    class Config:
        """Pydantic config."""

        from_attributes = True
|
||||
|
||||
|
||||
class BulkImageUpdate(BaseModel):
    """Schema for bulk updating multiple images.

    Deltas are applied relative to each image's current values.
    """

    image_ids: list[UUID] = Field(..., description="List of image IDs to update")
    position_delta: dict[str, float] | None = Field(None, description="Position delta to apply")
    transformations: dict[str, Any] | None = Field(None, description="Transformations to apply")
    z_order_delta: int | None = Field(None, description="Z-order delta to apply")

    @field_validator("position_delta")
    @classmethod
    def validate_position_delta(cls, v: dict[str, float] | None) -> dict[str, float] | None:
        """Validate position delta has dx and dy."""
        if v is not None and ("dx" not in v or "dy" not in v):
            raise ValueError("Position delta must contain 'dx' and 'dy'")
        return v
|
||||
|
||||
|
||||
class BulkUpdateResponse(BaseModel):
    """Response for bulk update operation."""

    updated_count: int = Field(..., description="Number of images updated")
    failed_count: int = Field(default=0, description="Number of images that failed to update")
    image_ids: list[UUID] = Field(..., description="IDs of successfully updated images")
|
||||
|
||||
|
||||
class ImageListResponse(BaseModel):
    """Paginated list of images."""

    images: list[ImageResponse]
    # Total matching images across all pages, not just this page.
    total: int
    page: int
    page_size: int
|
||||
74
backend/app/images/search.py
Normal file
74
backend/app/images/search.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""Image search and filtering functionality."""
|
||||
|
||||
from sqlalchemy import or_
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.database.models.image import Image
|
||||
|
||||
|
||||
def search_images(
    user_id: str,
    db: Session,
    query: str | None = None,
    limit: int = 50,
    offset: int = 0,
) -> list[Image]:
    """
    Search a user's image library with optional filters.

    Args:
        user_id: User UUID
        db: Database session
        query: Optional search text, matched case-insensitively against the
            filename and the stored "format" metadata
        limit: Maximum results (default 50)
        offset: Pagination offset (default 0)

    Returns:
        List of matching images, most recently uploaded first
    """
    matches = db.query(Image).filter(Image.user_id == user_id)

    if query:
        pattern = f"%{query}%"
        matches = matches.filter(
            or_(
                Image.filename.ilike(pattern),
                Image.image_metadata["format"].astext.ilike(pattern),
            )
        )

    # Newest first, then apply the requested page window.
    return matches.order_by(Image.created_at.desc()).limit(limit).offset(offset).all()
|
||||
|
||||
|
||||
def count_images(user_id: str, db: Session, query: str | None = None) -> int:
    """
    Count images matching search criteria.

    Applies the same filename/format filter as ``search_images`` so the
    total stays consistent with the paginated results.

    Args:
        user_id: User UUID
        db: Database session
        query: Search query (optional)

    Returns:
        Count of matching images
    """
    counted = db.query(Image).filter(Image.user_id == user_id)

    if query:
        pattern = f"%{query}%"
        text_filter = or_(
            Image.filename.ilike(pattern),
            Image.image_metadata["format"].astext.ilike(pattern),
        )
        counted = counted.filter(text_filter)

    return counted.count()
|
||||
103
backend/app/images/serve.py
Normal file
103
backend/app/images/serve.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""Image serving with quality-based thumbnail selection."""
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from app.database.models.image import Image
|
||||
|
||||
|
||||
def get_thumbnail_path(image: Image, quality: str) -> str:
    """
    Get thumbnail path for specified quality level.

    Args:
        image: Image model instance
        quality: Quality level ('low', 'medium', 'high', 'original')

    Returns:
        Storage path to the thumbnail, or the original storage path when
        'original' is requested or the thumbnail has not been generated

    Raises:
        ValueError: If quality level is invalid
    """
    if quality == "original":
        return image.storage_path

    # Idiom fix: the former if/elif chain repeated thumbnails.get(<name>)
    # for each level -- every quality maps to the metadata key of the same
    # name, so validate once and look up directly.
    if quality not in ("low", "medium", "high"):
        raise ValueError(f"Invalid quality level: {quality}")

    thumbnails = image.image_metadata.get("thumbnails", {})

    # Fall back to the original if the thumbnail doesn't exist (missing key
    # or empty path).
    return thumbnails.get(quality) or image.storage_path
|
||||
|
||||
|
||||
async def serve_image_with_quality(
    image: Image, quality: str = "medium", filename: str | None = None
) -> StreamingResponse:
    """
    Serve image with specified quality level.

    Args:
        image: Image model instance
        quality: Quality level ('low', 'medium', 'high', 'original')
        filename: Optional custom filename for download

    Returns:
        StreamingResponse with image data

    Raises:
        HTTPException: 400 for an invalid quality level, 500 for other
            failures; HTTPExceptions raised by the downloader itself
            propagate unchanged
    """
    # Local import -- presumably avoids a circular import with the download
    # module; confirm before hoisting to module level.
    from app.images.download import download_single_image

    try:
        # Resolve the storage path for the requested quality.
        storage_path = get_thumbnail_path(image, quality)

        # Use original filename if not specified.
        if filename is None:
            filename = image.filename

        return await download_single_image(storage_path, filename)

    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except HTTPException:
        # Bug fix: previously any HTTPException raised by the downloader
        # (e.g. a 404) was caught by the generic handler below and re-wrapped
        # as a 500. Let it through unchanged.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to serve image: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
def determine_quality_from_speed(speed_mbps: float) -> str:
    """
    Determine appropriate quality level based on connection speed.

    Args:
        speed_mbps: Connection speed in Mbps

    Returns:
        Quality level string: "low" below 1 Mbps, "medium" below 5 Mbps,
        otherwise "high"
    """
    # Ascending (threshold, level) tiers; first match wins.
    tiers = ((1.0, "low"), (5.0, "medium"))
    for threshold, level in tiers:
        if speed_mbps < threshold:
            return level
    return "high"
|
||||
86
backend/app/images/upload.py
Normal file
86
backend/app/images/upload.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""Image upload handler with streaming to MinIO."""
|
||||
|
||||
import contextlib
|
||||
import hashlib
|
||||
import io
|
||||
from uuid import UUID
|
||||
|
||||
from PIL import Image as PILImage
|
||||
|
||||
from app.core.storage import get_storage_client
|
||||
|
||||
|
||||
async def upload_image_to_storage(
    user_id: UUID, image_id: UUID, filename: str, contents: bytes
) -> tuple[str, int, int, str]:
    """
    Upload image to MinIO storage.

    Args:
        user_id: User ID for organizing storage
        image_id: Image ID for unique naming
        filename: Original filename (its extension is reused in the object key)
        contents: Image file contents

    Returns:
        Tuple of (storage_path, width, height, mime_type)
    """
    storage = get_storage_client()

    # Object key layout: originals/{user_id}/{image_id}.{ext}
    ext = filename.split(".")[-1].lower()
    object_key = f"originals/{user_id}/{image_id}.{ext}"

    # Inspect the image once to record dimensions and the real format
    # (falling back to the filename extension when PIL reports none).
    pil_image = PILImage.open(io.BytesIO(contents))
    width, height = pil_image.size
    detected_format = pil_image.format.lower() if pil_image.format else ext

    # Known PIL formats mapped to MIME types; anything else becomes
    # "image/<format>".
    known_mime_types = {
        "jpeg": "image/jpeg",
        "jpg": "image/jpeg",
        "png": "image/png",
        "gif": "image/gif",
        "webp": "image/webp",
        "svg": "image/svg+xml",
    }
    mime_type = known_mime_types.get(detected_format, f"image/{detected_format}")

    storage.put_object(
        bucket_name="webref",
        object_name=object_key,
        data=io.BytesIO(contents),
        length=len(contents),
        content_type=mime_type,
    )

    return object_key, width, height, mime_type
|
||||
|
||||
|
||||
def calculate_checksum(contents: bytes) -> str:
    """
    Calculate SHA256 checksum of file contents.

    Args:
        contents: File contents

    Returns:
        SHA256 checksum as hex string
    """
    digest = hashlib.sha256()
    digest.update(contents)
    return digest.hexdigest()
|
||||
|
||||
|
||||
async def delete_image_from_storage(storage_path: str) -> None:
    """
    Best-effort delete of an image object from MinIO storage.

    Args:
        storage_path: Path to image in storage
    """
    client = get_storage_client()
    try:
        client.remove_object(bucket_name="webref", object_name=storage_path)
    except Exception:
        # Deliberately swallowed: the object may already be deleted, and a
        # failed cleanup must not break the caller's flow.
        pass
|
||||
100
backend/app/images/validation.py
Normal file
100
backend/app/images/validation.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""File validation utilities for image uploads."""
|
||||
|
||||
import magic
|
||||
from fastapi import HTTPException, UploadFile, status
|
||||
|
||||
from app.core.constants import (
|
||||
ALLOWED_EXTENSIONS,
|
||||
ALLOWED_MIME_TYPES,
|
||||
MAX_IMAGE_SIZE,
|
||||
)
|
||||
|
||||
|
||||
async def validate_image_file(file: UploadFile) -> bytes:
    """
    Validate an uploaded image file and return its raw bytes.

    Checks, in order: non-empty, within the size limit, allowed file
    extension (when a filename is present), and magic-byte MIME type in
    the allow list.

    Args:
        file: The uploaded file from FastAPI

    Returns:
        File contents as bytes

    Raises:
        HTTPException: If validation fails
    """
    contents = await file.read()
    # Rewind so callers can re-read the stream after validation.
    await file.seek(0)

    size = len(contents)
    if size == 0:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Empty file uploaded")
    if size > MAX_IMAGE_SIZE:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail=f"File too large. Maximum size is {MAX_IMAGE_SIZE / 1_048_576:.1f}MB",
        )

    # Extension check uses the client-supplied name, when one was sent.
    if file.filename:
        extension = "." + file.filename.lower().split(".")[-1] if "." in file.filename else ""
        if extension not in ALLOWED_EXTENSIONS:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid file extension. Allowed: {', '.join(ALLOWED_EXTENSIONS)}",
            )

    # Magic-byte sniff guards against spoofed extensions.
    mime = magic.from_buffer(contents, mime=True)
    if mime not in ALLOWED_MIME_TYPES:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid file type '{mime}'. Allowed types: {', '.join(ALLOWED_MIME_TYPES)}",
        )

    return contents
|
||||
|
||||
|
||||
def sanitize_filename(filename: str) -> str:
    """
    Sanitize filename to prevent path traversal and other attacks.

    Args:
        filename: Original filename

    Returns:
        Sanitized filename, at most 255 characters long
    """
    import re

    # Remove path separators
    filename = filename.replace("/", "_").replace("\\", "_")

    # Remove any non-alphanumeric characters except dots, dashes, underscores
    filename = re.sub(r"[^a-zA-Z0-9._-]", "_", filename)

    # Enforce the length limit, preserving the extension when possible.
    max_length = 255
    if len(filename) > max_length:
        name, dot, ext = filename.rpartition(".")
        # Bug fix: the old rsplit-based truncation computed
        # name[: max_length - len(ext) - 1], which for a very long
        # extension becomes a negative slice (empty name) and the result
        # "." + ext could still exceed max_length. Only keep the extension
        # when it actually fits within the limit.
        if dot and len(ext) + 1 < max_length:
            filename = name[: max_length - len(ext) - 1] + "." + ext
        else:
            filename = filename[:max_length]

    return filename
|
||||
73
backend/app/images/zip_handler.py
Normal file
73
backend/app/images/zip_handler.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""ZIP file extraction handler for batch image uploads."""
|
||||
|
||||
import io
|
||||
import zipfile
|
||||
from collections.abc import AsyncIterator
|
||||
|
||||
from fastapi import HTTPException, UploadFile, status
|
||||
|
||||
|
||||
async def extract_images_from_zip(zip_file: UploadFile) -> AsyncIterator[tuple[str, bytes]]:
    """
    Extract image files from ZIP archive.

    Args:
        zip_file: Uploaded ZIP file

    Yields:
        Tuples of (filename, contents) for each image file

    Raises:
        HTTPException: If ZIP is invalid, too large, or contains no images
    """
    # Read ZIP contents
    zip_contents = await zip_file.read()

    # Check ZIP size (max 200MB for ZIP)
    max_zip_size = 200 * 1024 * 1024  # 200MB
    if len(zip_contents) > max_zip_size:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail=f"ZIP file too large. Maximum size is {max_zip_size / 1_048_576:.1f}MB",
        )

    try:
        # Open ZIP file
        with zipfile.ZipFile(io.BytesIO(zip_contents)) as zip_ref:
            # Get list of image files (filter by extension)
            image_extensions = {".jpg", ".jpeg", ".png", ".gif", ".webp", ".svg"}
            image_files = [
                name
                for name in zip_ref.namelist()
                if not name.startswith("__MACOSX/")  # Skip macOS metadata
                and not name.startswith(".")  # Skip hidden files
                and any(name.lower().endswith(ext) for ext in image_extensions)
            ]

            if not image_files:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="No valid image files found in ZIP archive",
                )

            # Extract each image
            for filename in image_files:
                # Skip directories
                if filename.endswith("/"):
                    continue

                # Get just the filename without path
                base_filename = filename.split("/")[-1]

                # Read file contents
                file_contents = zip_ref.read(filename)

                yield base_filename, file_contents

    except zipfile.BadZipFile as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid ZIP file") from e
    except HTTPException:
        # Bug fix: the 400 "No valid image files" error raised above was
        # previously caught by the generic handler below and surfaced as a
        # 500. Re-raise our own HTTP errors unchanged.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error processing ZIP file: {str(e)}",
        ) from e
|
||||
106
backend/app/main.py
Normal file
106
backend/app/main.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""FastAPI application entry point."""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from app.api import auth, boards, export, groups, images, library, quality, sharing
|
||||
from app.core.config import settings
|
||||
from app.core.errors import WebRefException
|
||||
from app.core.logging import setup_logging
|
||||
from app.core.middleware import setup_middleware
|
||||
|
||||
# Setup logging
|
||||
setup_logging()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Create FastAPI application
|
||||
app = FastAPI(
|
||||
title=settings.APP_NAME,
|
||||
version=settings.APP_VERSION,
|
||||
description="Reference Board Viewer - Web-based visual reference management",
|
||||
docs_url="/docs",
|
||||
redoc_url="/redoc",
|
||||
openapi_url=f"{settings.API_V1_PREFIX}/openapi.json",
|
||||
)
|
||||
|
||||
# Setup middleware
|
||||
setup_middleware(app)
|
||||
|
||||
|
||||
# Exception handlers
|
||||
@app.exception_handler(WebRefException)
|
||||
async def webref_exception_handler(request: Request, exc: WebRefException):
|
||||
"""Handle custom WebRef exceptions."""
|
||||
logger.error(f"WebRef exception: {exc.message}", extra={"details": exc.details})
|
||||
return JSONResponse(
|
||||
status_code=exc.status_code,
|
||||
content={
|
||||
"error": exc.message,
|
||||
"details": exc.details,
|
||||
"status_code": exc.status_code,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(Exception)
|
||||
async def general_exception_handler(request: Request, exc: Exception):
|
||||
"""Handle unexpected exceptions."""
|
||||
logger.exception("Unexpected error occurred")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content={
|
||||
"error": "Internal server error",
|
||||
"details": str(exc) if settings.DEBUG else {},
|
||||
"status_code": 500,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
# Health check endpoint
|
||||
@app.get("/health", tags=["System"])
|
||||
async def health_check():
|
||||
"""Health check endpoint."""
|
||||
return {
|
||||
"status": "healthy",
|
||||
"version": settings.APP_VERSION,
|
||||
"app": settings.APP_NAME,
|
||||
}
|
||||
|
||||
|
||||
# Root endpoint
|
||||
@app.get("/", tags=["System"])
|
||||
async def root():
|
||||
"""Root endpoint with API information."""
|
||||
return {
|
||||
"message": f"Welcome to {settings.APP_NAME} API",
|
||||
"version": settings.APP_VERSION,
|
||||
"docs": "/docs",
|
||||
"health": "/health",
|
||||
}
|
||||
|
||||
|
||||
# API routers
|
||||
app.include_router(auth.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(boards.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(groups.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(images.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(sharing.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(export.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(library.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
app.include_router(quality.router, prefix=f"{settings.API_V1_PREFIX}")
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup_event():
|
||||
"""Application startup tasks."""
|
||||
logger.info(f"Starting {settings.APP_NAME} v{settings.APP_VERSION}")
|
||||
logger.info(f"Debug mode: {settings.DEBUG}")
|
||||
logger.info(f"API prefix: {settings.API_V1_PREFIX}")
|
||||
|
||||
|
||||
@app.on_event("shutdown")
|
||||
async def shutdown_event():
|
||||
"""Application shutdown tasks."""
|
||||
logger.info(f"Shutting down {settings.APP_NAME}")
|
||||
97
backend/pyproject.toml
Normal file
97
backend/pyproject.toml
Normal file
@@ -0,0 +1,97 @@
|
||||
[project]
|
||||
name = "webref-backend"
|
||||
version = "1.0.0"
|
||||
description = "Reference Board Viewer - Backend API"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"fastapi>=0.115.0",
|
||||
"uvicorn[standard]>=0.32.0",
|
||||
"sqlalchemy>=2.0.0",
|
||||
"alembic>=1.13.0",
|
||||
"pydantic>=2.9.0",
|
||||
"pydantic-settings>=2.6.0",
|
||||
"python-jose[cryptography]>=3.3.0",
|
||||
"passlib[bcrypt]>=1.7.4",
|
||||
"pillow>=11.0.0",
|
||||
"boto3>=1.35.0",
|
||||
"python-multipart>=0.0.12",
|
||||
"httpx>=0.27.0",
|
||||
"psycopg2>=2.9.0",
|
||||
"python-magic>=0.4.27",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=8.3.0",
|
||||
"pytest-cov>=6.0.0",
|
||||
"pytest-asyncio>=0.24.0",
|
||||
"ruff>=0.7.0",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.setuptools]
|
||||
packages = ["app"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
app = ["py.typed"]
|
||||
|
||||
[tool.ruff]
|
||||
# Exclude common paths
|
||||
exclude = [
|
||||
".git",
|
||||
".ruff_cache",
|
||||
".venv",
|
||||
"__pycache__",
|
||||
"alembic/versions",
|
||||
]
|
||||
|
||||
# Line length (slightly longer for SQLAlchemy models)
|
||||
line-length = 120
|
||||
|
||||
# Target Python 3.12
|
||||
target-version = "py312"
|
||||
|
||||
[tool.ruff.lint]
|
||||
# Enable pycodestyle (`E`), Pyflakes (`F`), isort (`I`)
|
||||
select = ["E", "F", "I", "W", "N", "UP", "B", "C4", "SIM"]
|
||||
ignore = [
|
||||
"B008", # Allow Depends() in FastAPI function defaults
|
||||
"N818", # Allow WebRefException without Error suffix
|
||||
]
|
||||
|
||||
# Allow unused variables when underscore-prefixed.
|
||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"__init__.py" = ["F401"] # Allow unused imports in __init__.py
|
||||
"tests/*" = ["S101"] # Allow assert in tests
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = "test_*.py"
|
||||
python_classes = "Test*"
|
||||
python_functions = "test_*"
|
||||
addopts = [
|
||||
"--strict-markers",
|
||||
"--tb=short",
|
||||
"--cov=app",
|
||||
"--cov-report=term-missing",
|
||||
"--cov-report=html",
|
||||
# Temporarily disabled until tests are written (Phase 3 deferred T045-T047)
|
||||
# Will re-enable in Phase 23 (Testing & QA)
|
||||
# "--cov-fail-under=80",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
|
||||
[tool.coverage.run]
|
||||
source = ["app"]
|
||||
omit = ["tests/*", "alembic/*"]
|
||||
|
||||
[tool.coverage.report]
|
||||
precision = 2
|
||||
show_missing = true
|
||||
skip_covered = false
|
||||
|
||||
54
backend/pytest.ini
Normal file
54
backend/pytest.ini
Normal file
@@ -0,0 +1,54 @@
|
||||
[pytest]
|
||||
# Test discovery
|
||||
testpaths = tests
|
||||
python_files = test_*.py
|
||||
python_classes = Test*
|
||||
python_functions = test_*
|
||||
|
||||
# Output options
|
||||
addopts =
|
||||
--strict-markers
|
||||
--tb=short
|
||||
--cov=app
|
||||
--cov-report=term-missing:skip-covered
|
||||
--cov-report=html
|
||||
--cov-report=xml
|
||||
--cov-fail-under=80
|
||||
-v
|
||||
--color=yes
|
||||
|
||||
# Async support
|
||||
asyncio_mode = auto
|
||||
|
||||
# Markers
|
||||
markers =
|
||||
slow: marks tests as slow (deselect with '-m "not slow"')
|
||||
integration: marks tests as integration tests
|
||||
unit: marks tests as unit tests
|
||||
auth: marks tests related to authentication
|
||||
boards: marks tests related to boards
|
||||
images: marks tests related to images
|
||||
upload: marks tests related to file uploads
|
||||
|
||||
# Coverage options
|
||||
[coverage:run]
|
||||
source = app
|
||||
omit =
|
||||
tests/*
|
||||
alembic/*
|
||||
app/__init__.py
|
||||
*/migrations/*
|
||||
|
||||
[coverage:report]
|
||||
precision = 2
|
||||
show_missing = true
|
||||
skip_covered = false
|
||||
exclude_lines =
|
||||
pragma: no cover
|
||||
def __repr__
|
||||
raise AssertionError
|
||||
raise NotImplementedError
|
||||
if __name__ == .__main__.:
|
||||
if TYPE_CHECKING:
|
||||
@abstractmethod
|
||||
|
||||
2
backend/tests/__init__.py
Normal file
2
backend/tests/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
"""Test package for Reference Board Viewer backend."""
|
||||
|
||||
2
backend/tests/api/__init__.py
Normal file
2
backend/tests/api/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
"""API endpoint tests."""
|
||||
|
||||
364
backend/tests/api/test_auth.py
Normal file
364
backend/tests/api/test_auth.py
Normal file
@@ -0,0 +1,364 @@
|
||||
"""Integration tests for authentication endpoints."""
|
||||
|
||||
from fastapi import status
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
class TestRegisterEndpoint:
    """Test POST /auth/register endpoint.

    The ``client`` and ``test_user_data*`` fixtures are defined elsewhere,
    presumably in conftest.py -- confirm.
    """

    def test_register_user_success(self, client: TestClient, test_user_data: dict):
        """Test successful user registration."""
        response = client.post("/api/v1/auth/register", json=test_user_data)

        assert response.status_code == status.HTTP_201_CREATED

        data = response.json()
        assert "id" in data
        assert data["email"] == test_user_data["email"]
        assert "password" not in data  # Password should not be returned
        assert "password_hash" not in data
        assert "created_at" in data

    def test_register_user_duplicate_email(self, client: TestClient, test_user_data: dict):
        """Test that duplicate email registration fails."""
        # Register first user
        response1 = client.post("/api/v1/auth/register", json=test_user_data)
        assert response1.status_code == status.HTTP_201_CREATED

        # Try to register with same email
        response2 = client.post("/api/v1/auth/register", json=test_user_data)

        assert response2.status_code == status.HTTP_409_CONFLICT
        assert "already registered" in response2.json()["detail"].lower()

    def test_register_user_weak_password(self, client: TestClient, test_user_data_weak_password: dict):
        """Test that weak password is rejected."""
        response = client.post("/api/v1/auth/register", json=test_user_data_weak_password)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "password" in response.json()["detail"].lower()

    def test_register_user_no_uppercase(self, client: TestClient, test_user_data_no_uppercase: dict):
        """Test that password without uppercase is rejected."""
        response = client.post("/api/v1/auth/register", json=test_user_data_no_uppercase)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "uppercase" in response.json()["detail"].lower()

    def test_register_user_no_lowercase(self, client: TestClient):
        """Test that password without lowercase is rejected."""
        user_data = {"email": "test@example.com", "password": "TESTPASSWORD123"}
        response = client.post("/api/v1/auth/register", json=user_data)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "lowercase" in response.json()["detail"].lower()

    def test_register_user_no_number(self, client: TestClient):
        """Test that password without number is rejected."""
        user_data = {"email": "test@example.com", "password": "TestPassword"}
        response = client.post("/api/v1/auth/register", json=user_data)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "number" in response.json()["detail"].lower()

    def test_register_user_too_short(self, client: TestClient):
        """Test that password shorter than 8 characters is rejected."""
        user_data = {"email": "test@example.com", "password": "Test123"}
        response = client.post("/api/v1/auth/register", json=user_data)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "8 characters" in response.json()["detail"].lower()

    def test_register_user_invalid_email(self, client: TestClient):
        """Test that invalid email format is rejected."""
        # Pydantic's email validation rejects these before the handler runs,
        # hence 422 rather than 400.
        invalid_emails = [
            {"email": "not-an-email", "password": "TestPassword123"},
            {"email": "missing@domain", "password": "TestPassword123"},
            {"email": "@example.com", "password": "TestPassword123"},
            {"email": "user@", "password": "TestPassword123"},
        ]

        for user_data in invalid_emails:
            response = client.post("/api/v1/auth/register", json=user_data)
            assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

    def test_register_user_missing_fields(self, client: TestClient):
        """Test that missing required fields are rejected."""
        # Missing email
        response1 = client.post("/api/v1/auth/register", json={"password": "TestPassword123"})
        assert response1.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

        # Missing password
        response2 = client.post("/api/v1/auth/register", json={"email": "test@example.com"})
        assert response2.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

        # Empty body
        response3 = client.post("/api/v1/auth/register", json={})
        assert response3.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

    def test_register_user_email_case_handling(self, client: TestClient):
        """Test email case handling in registration."""
        user_data_upper = {"email": "TEST@EXAMPLE.COM", "password": "TestPassword123"}

        response = client.post("/api/v1/auth/register", json=user_data_upper)

        assert response.status_code == status.HTTP_201_CREATED
        # Email should be stored as lowercase
        data = response.json()
        assert data["email"] == "test@example.com"
||||
|
||||
|
||||
class TestLoginEndpoint:
    """Test POST /auth/login endpoint.

    Each test registers its own user first; the database is presumably
    reset per test by a fixture -- confirm against conftest.
    """

    def test_login_user_success(self, client: TestClient, test_user_data: dict):
        """Test successful user login."""
        # Register user first
        client.post("/api/v1/auth/register", json=test_user_data)

        # Login
        response = client.post("/api/v1/auth/login", json=test_user_data)

        assert response.status_code == status.HTTP_200_OK

        data = response.json()
        assert "access_token" in data
        assert data["token_type"] == "bearer"
        assert "user" in data
        assert data["user"]["email"] == test_user_data["email"]

    def test_login_user_wrong_password(self, client: TestClient, test_user_data: dict):
        """Test that wrong password fails login."""
        # Register user
        client.post("/api/v1/auth/register", json=test_user_data)

        # Try to login with wrong password
        wrong_data = {"email": test_user_data["email"], "password": "WrongPassword123"}
        response = client.post("/api/v1/auth/login", json=wrong_data)

        assert response.status_code == status.HTTP_401_UNAUTHORIZED
        # RFC 7235: a 401 must carry a WWW-Authenticate challenge.
        assert "WWW-Authenticate" in response.headers
        assert response.headers["WWW-Authenticate"] == "Bearer"

    def test_login_user_nonexistent_email(self, client: TestClient):
        """Test that login with nonexistent email fails."""
        login_data = {"email": "nonexistent@example.com", "password": "TestPassword123"}
        response = client.post("/api/v1/auth/login", json=login_data)

        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_login_user_case_sensitive_password(self, client: TestClient, test_user_data: dict):
        """Test that password is case-sensitive."""
        # Register user
        client.post("/api/v1/auth/register", json=test_user_data)

        # Try to login with different case
        wrong_case = {"email": test_user_data["email"], "password": test_user_data["password"].lower()}
        response = client.post("/api/v1/auth/login", json=wrong_case)

        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_login_user_email_case_insensitive(self, client: TestClient, test_user_data: dict):
        """Test that email login is case-insensitive."""
        # Register user
        client.post("/api/v1/auth/register", json=test_user_data)

        # Login with different email case
        upper_email = {"email": test_user_data["email"].upper(), "password": test_user_data["password"]}
        response = client.post("/api/v1/auth/login", json=upper_email)

        assert response.status_code == status.HTTP_200_OK

    def test_login_user_missing_fields(self, client: TestClient):
        """Test that missing fields are rejected."""
        # Missing password
        response1 = client.post("/api/v1/auth/login", json={"email": "test@example.com"})
        assert response1.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

        # Missing email
        response2 = client.post("/api/v1/auth/login", json={"password": "TestPassword123"})
        assert response2.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

    def test_login_user_token_format(self, client: TestClient, test_user_data: dict):
        """Test that returned token is valid JWT format."""
        # Register and login
        client.post("/api/v1/auth/register", json=test_user_data)
        response = client.post("/api/v1/auth/login", json=test_user_data)

        assert response.status_code == status.HTTP_200_OK

        data = response.json()
        token = data["access_token"]

        # JWT should have 3 parts separated by dots
        parts = token.split(".")
        assert len(parts) == 3

        # Each part should be base64-encoded (URL-safe)
        import string

        url_safe = string.ascii_letters + string.digits + "-_"
        for part in parts:
            assert all(c in url_safe for c in part)
||||
|
||||
|
||||
class TestGetCurrentUserEndpoint:
    """Test GET /auth/me endpoint."""

    def test_get_current_user_success(self, client: TestClient, test_user_data: dict):
        """Test getting current user info with valid token."""
        # Register and login
        client.post("/api/v1/auth/register", json=test_user_data)
        login_response = client.post("/api/v1/auth/login", json=test_user_data)

        token = login_response.json()["access_token"]

        # Get current user
        response = client.get("/api/v1/auth/me", headers={"Authorization": f"Bearer {token}"})

        assert response.status_code == status.HTTP_200_OK

        data = response.json()
        assert data["email"] == test_user_data["email"]
        assert "id" in data
        assert "created_at" in data
        assert "password" not in data

    def test_get_current_user_no_token(self, client: TestClient):
        """Test that missing token returns 401."""
        response = client.get("/api/v1/auth/me")

        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_get_current_user_invalid_token(self, client: TestClient):
        """Test that invalid token returns 401."""
        response = client.get("/api/v1/auth/me", headers={"Authorization": "Bearer invalid_token"})

        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_get_current_user_malformed_header(self, client: TestClient):
        """Test that malformed auth header returns 401."""
        # Missing "Bearer" prefix
        response1 = client.get("/api/v1/auth/me", headers={"Authorization": "just_a_token"})
        assert response1.status_code == status.HTTP_401_UNAUTHORIZED

        # Wrong prefix
        response2 = client.get("/api/v1/auth/me", headers={"Authorization": "Basic dGVzdA=="})
        assert response2.status_code == status.HTTP_401_UNAUTHORIZED

    def test_get_current_user_expired_token(self, client: TestClient, test_user_data: dict):
        """Test that expired token returns 401."""
        from datetime import timedelta

        from app.auth.jwt import create_access_token

        # Register user
        register_response = client.post("/api/v1/auth/register", json=test_user_data)
        user_id = register_response.json()["id"]

        # Create expired token (negative lifetime puts exp in the past)
        from uuid import UUID

        expired_token = create_access_token(UUID(user_id), test_user_data["email"], timedelta(seconds=-10))

        # Try to use expired token
        response = client.get("/api/v1/auth/me", headers={"Authorization": f"Bearer {expired_token}"})

        assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
class TestAuthenticationFlow:
    """Test complete authentication flows."""

    def test_complete_register_login_access_flow(self, client: TestClient, test_user_data: dict):
        """Test complete flow: register → login → access protected resource."""
        # Step 1: Register
        register_response = client.post("/api/v1/auth/register", json=test_user_data)
        assert register_response.status_code == status.HTTP_201_CREATED
        registered_user = register_response.json()
        assert registered_user["email"] == test_user_data["email"]

        # Step 2: Login
        login_response = client.post("/api/v1/auth/login", json=test_user_data)
        assert login_response.status_code == status.HTTP_200_OK
        login_payload = login_response.json()
        # The login response embeds the same user that registration produced.
        assert login_payload["user"]["id"] == registered_user["id"]

        # Step 3: Access protected resource
        auth = {"Authorization": f"Bearer {login_payload['access_token']}"}
        me_response = client.get("/api/v1/auth/me", headers=auth)
        assert me_response.status_code == status.HTTP_200_OK
        current_user = me_response.json()
        assert current_user["id"] == registered_user["id"]
        assert current_user["email"] == test_user_data["email"]

    def test_multiple_users_independent_authentication(self, client: TestClient):
        """Test that multiple users can register and authenticate independently."""
        users = [
            {"email": "user1@example.com", "password": "Password123"},
            {"email": "user2@example.com", "password": "Password456"},
            {"email": "user3@example.com", "password": "Password789"},
        ]

        tokens = []
        for account in users:
            register_response = client.post("/api/v1/auth/register", json=account)
            assert register_response.status_code == status.HTTP_201_CREATED

            login_response = client.post("/api/v1/auth/login", json=account)
            assert login_response.status_code == status.HTTP_200_OK
            tokens.append(login_response.json()["access_token"])

        # Each token must resolve to its own user, independent of the others.
        for account, token in zip(users, tokens):
            response = client.get("/api/v1/auth/me", headers={"Authorization": f"Bearer {token}"})
            assert response.status_code == status.HTTP_200_OK
            assert response.json()["email"] == account["email"]

    def test_token_reuse_across_multiple_requests(self, client: TestClient, test_user_data: dict):
        """Test that same token can be reused for multiple requests."""
        client.post("/api/v1/auth/register", json=test_user_data)
        login_response = client.post("/api/v1/auth/login", json=test_user_data)
        headers = {"Authorization": f"Bearer {login_response.json()['access_token']}"}

        # The token stays valid across repeated calls.
        for _ in range(5):
            response = client.get("/api/v1/auth/me", headers=headers)
            assert response.status_code == status.HTTP_200_OK
            assert response.json()["email"] == test_user_data["email"]

    def test_password_not_exposed_in_any_response(self, client: TestClient, test_user_data: dict):
        """Test that password is never exposed in any API response."""
        # Register
        register_data = client.post("/api/v1/auth/register", json=test_user_data).json()
        assert "password" not in register_data
        assert "password_hash" not in register_data

        # Login (scan the whole serialized payload, including nested objects)
        login_data = client.post("/api/v1/auth/login", json=test_user_data).json()
        assert "password" not in str(login_data)
        assert "password_hash" not in str(login_data)

        # Get current user
        auth = {"Authorization": f"Bearer {login_data['access_token']}"}
        me_data = client.get("/api/v1/auth/me", headers=auth).json()
        assert "password" not in me_data
        assert "password_hash" not in me_data
|
||||
|
||||
558
backend/tests/api/test_boards.py
Normal file
558
backend/tests/api/test_boards.py
Normal file
@@ -0,0 +1,558 @@
|
||||
"""Integration tests for board API endpoints."""
|
||||
|
||||
import pytest
|
||||
from fastapi import status
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
@pytest.fixture
def authenticated_client(client: TestClient, test_user_data: dict) -> tuple[TestClient, dict]:
    """
    Register and log in the test user, yielding a ready-to-use client.

    Returns:
        Tuple of (client, auth_headers)
    """
    client.post("/api/v1/auth/register", json=test_user_data)
    login_response = client.post("/api/v1/auth/login", json=test_user_data)

    auth_headers = {"Authorization": f"Bearer {login_response.json()['access_token']}"}
    return client, auth_headers
|
||||
|
||||
|
||||
class TestCreateBoardEndpoint:
    """Test POST /boards endpoint."""

    def test_create_board_success(self, authenticated_client: tuple[TestClient, dict]):
        """Test successful board creation."""
        client, headers = authenticated_client
        payload = {"title": "My First Board", "description": "Test description"}

        response = client.post("/api/v1/boards", json=payload, headers=headers)
        assert response.status_code == status.HTTP_201_CREATED

        board = response.json()
        assert "id" in board
        assert board["title"] == "My First Board"
        assert board["description"] == "Test description"
        # A fresh board starts with a default viewport and is not deleted.
        assert "viewport_state" in board
        assert board["viewport_state"]["zoom"] == 1.0
        assert board["is_deleted"] is False

    def test_create_board_minimal(self, authenticated_client: tuple[TestClient, dict]):
        """Test creating board with only title."""
        client, headers = authenticated_client

        response = client.post("/api/v1/boards", json={"title": "Minimal Board"}, headers=headers)
        assert response.status_code == status.HTTP_201_CREATED

        board = response.json()
        assert board["title"] == "Minimal Board"
        assert board["description"] is None

    def test_create_board_empty_title(self, authenticated_client: tuple[TestClient, dict]):
        """Test that empty title is rejected."""
        client, headers = authenticated_client

        response = client.post("/api/v1/boards", json={"title": ""}, headers=headers)
        assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

    def test_create_board_missing_title(self, authenticated_client: tuple[TestClient, dict]):
        """Test that missing title is rejected."""
        client, headers = authenticated_client

        response = client.post("/api/v1/boards", json={"description": "No title"}, headers=headers)
        assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

    def test_create_board_unauthenticated(self, client: TestClient):
        """Test that unauthenticated users can't create boards."""
        response = client.post("/api/v1/boards", json={"title": "Unauthorized Board"})
        assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
class TestListBoardsEndpoint:
    """Test GET /boards endpoint."""

    def test_list_boards_empty(self, authenticated_client: tuple[TestClient, dict]):
        """Test listing boards when user has none."""
        client, headers = authenticated_client

        response = client.get("/api/v1/boards", headers=headers)
        assert response.status_code == status.HTTP_200_OK

        listing = response.json()
        assert listing["boards"] == []
        assert listing["total"] == 0
        # Default pagination window.
        assert listing["limit"] == 50
        assert listing["offset"] == 0

    def test_list_boards_multiple(self, authenticated_client: tuple[TestClient, dict]):
        """Test listing multiple boards."""
        client, headers = authenticated_client

        for i in range(3):
            client.post("/api/v1/boards", json={"title": f"Board {i}"}, headers=headers)

        response = client.get("/api/v1/boards", headers=headers)
        assert response.status_code == status.HTTP_200_OK

        listing = response.json()
        assert len(listing["boards"]) == 3
        assert listing["total"] == 3

    def test_list_boards_pagination(self, authenticated_client: tuple[TestClient, dict]):
        """Test board pagination."""
        client, headers = authenticated_client

        for i in range(5):
            client.post("/api/v1/boards", json={"title": f"Board {i}"}, headers=headers)

        # First page of two; total reflects all five boards.
        page1 = client.get("/api/v1/boards?limit=2&offset=0", headers=headers).json()
        assert len(page1["boards"]) == 2
        assert page1["total"] == 5
        assert page1["limit"] == 2
        assert page1["offset"] == 0

        # Second page of two.
        page2 = client.get("/api/v1/boards?limit=2&offset=2", headers=headers).json()
        assert len(page2["boards"]) == 2
        assert page2["total"] == 5

    def test_list_boards_unauthenticated(self, client: TestClient):
        """Test that unauthenticated users can't list boards."""
        response = client.get("/api/v1/boards")
        assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
class TestGetBoardEndpoint:
    """Test GET /boards/{board_id} endpoint."""

    def test_get_board_success(self, authenticated_client: tuple[TestClient, dict]):
        """Test getting existing board."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Test Board"}, headers=headers)
        board_id = created.json()["id"]

        response = client.get(f"/api/v1/boards/{board_id}", headers=headers)
        assert response.status_code == status.HTTP_200_OK

        board = response.json()
        assert board["id"] == board_id
        assert board["title"] == "Test Board"

    def test_get_board_not_found(self, authenticated_client: tuple[TestClient, dict]):
        """Test getting nonexistent board."""
        client, headers = authenticated_client

        fake_id = "00000000-0000-0000-0000-000000000000"
        response = client.get(f"/api/v1/boards/{fake_id}", headers=headers)
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_get_board_unauthenticated(self, client: TestClient):
        """Test that unauthenticated users can't get boards."""
        fake_id = "00000000-0000-0000-0000-000000000000"
        response = client.get(f"/api/v1/boards/{fake_id}")
        assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
class TestUpdateBoardEndpoint:
    """Test PATCH /boards/{board_id} endpoint."""

    def test_update_board_title(self, authenticated_client: tuple[TestClient, dict]):
        """Test updating board title."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Original Title"}, headers=headers)
        board_id = created.json()["id"]

        response = client.patch(
            f"/api/v1/boards/{board_id}", json={"title": "Updated Title"}, headers=headers
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["title"] == "Updated Title"

    def test_update_board_description(self, authenticated_client: tuple[TestClient, dict]):
        """Test updating board description."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Test Board"}, headers=headers)
        board_id = created.json()["id"]

        response = client.patch(
            f"/api/v1/boards/{board_id}", json={"description": "New description"}, headers=headers
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["description"] == "New description"

    def test_update_board_viewport(self, authenticated_client: tuple[TestClient, dict]):
        """Test updating viewport state."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Test Board"}, headers=headers)
        board_id = created.json()["id"]

        viewport = {"x": 100, "y": 200, "zoom": 1.5, "rotation": 45}
        response = client.patch(
            f"/api/v1/boards/{board_id}", json={"viewport_state": viewport}, headers=headers
        )
        assert response.status_code == status.HTTP_200_OK

        saved = response.json()["viewport_state"]
        assert saved["x"] == 100
        assert saved["y"] == 200
        assert saved["zoom"] == 1.5
        assert saved["rotation"] == 45

    def test_update_board_invalid_viewport(self, authenticated_client: tuple[TestClient, dict]):
        """Test that invalid viewport values are rejected."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Test Board"}, headers=headers)
        board_id = created.json()["id"]

        # Zoom of 10.0 is outside the accepted range.
        bad_viewport = {"x": 0, "y": 0, "zoom": 10.0, "rotation": 0}
        response = client.patch(
            f"/api/v1/boards/{board_id}", json={"viewport_state": bad_viewport}, headers=headers
        )
        assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY

    def test_update_board_not_found(self, authenticated_client: tuple[TestClient, dict]):
        """Test updating nonexistent board."""
        client, headers = authenticated_client

        fake_id = "00000000-0000-0000-0000-000000000000"
        response = client.patch(
            f"/api/v1/boards/{fake_id}", json={"title": "Updated"}, headers=headers
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
class TestDeleteBoardEndpoint:
    """Test DELETE /boards/{board_id} endpoint."""

    def test_delete_board_success(self, authenticated_client: tuple[TestClient, dict]):
        """Test successfully deleting a board."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Test Board"}, headers=headers)
        board_id = created.json()["id"]

        response = client.delete(f"/api/v1/boards/{board_id}", headers=headers)
        assert response.status_code == status.HTTP_204_NO_CONTENT

        # The deleted board must no longer show up in listings.
        remaining = client.get("/api/v1/boards", headers=headers).json()["boards"]
        assert all(b["id"] != board_id for b in remaining)

    def test_delete_board_not_found(self, authenticated_client: tuple[TestClient, dict]):
        """Test deleting nonexistent board."""
        client, headers = authenticated_client

        fake_id = "00000000-0000-0000-0000-000000000000"
        response = client.delete(f"/api/v1/boards/{fake_id}", headers=headers)
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_delete_board_unauthenticated(self, client: TestClient):
        """Test that unauthenticated users can't delete boards."""
        fake_id = "00000000-0000-0000-0000-000000000000"
        response = client.delete(f"/api/v1/boards/{fake_id}")
        assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
class TestBoardOwnershipIsolation:
    """Test that users can only access their own boards.

    Ownership violations are expected to surface as 404 (not 403) so that
    the API does not leak the existence of other users' boards.
    """

    @staticmethod
    def _register_and_login(client: TestClient, email: str, password: str) -> dict:
        """Register a user and log them in.

        Args:
            client: Test client for the app under test.
            email: Email address for the new account.
            password: Plaintext password for the new account.

        Returns:
            Authorization headers carrying the user's bearer token.
        """
        user_data = {"email": email, "password": password}
        client.post("/api/v1/auth/register", json=user_data)
        login = client.post("/api/v1/auth/login", json=user_data)
        token = login.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}

    def test_users_cannot_see_each_others_boards(self, client: TestClient):
        """Test that users only see their own boards in listings."""
        # Create user1 and a board
        headers1 = self._register_and_login(client, "user1@example.com", "Password123")
        client.post("/api/v1/boards", json={"title": "User 1 Board"}, headers=headers1)

        # Create user2 and a board
        headers2 = self._register_and_login(client, "user2@example.com", "Password456")
        client.post("/api/v1/boards", json={"title": "User 2 Board"}, headers=headers2)

        # User1 should only see their board
        boards1 = client.get("/api/v1/boards", headers=headers1).json()["boards"]
        assert len(boards1) == 1
        assert boards1[0]["title"] == "User 1 Board"

        # User2 should only see their board
        boards2 = client.get("/api/v1/boards", headers=headers2).json()["boards"]
        assert len(boards2) == 1
        assert boards2[0]["title"] == "User 2 Board"

    def test_users_cannot_access_each_others_boards_directly(self, client: TestClient):
        """Test that users can't access boards they don't own."""
        headers1 = self._register_and_login(client, "user1@example.com", "Password123")
        create_response = client.post(
            "/api/v1/boards", json={"title": "User 1 Board"}, headers=headers1
        )
        board_id = create_response.json()["id"]

        headers2 = self._register_and_login(client, "user2@example.com", "Password456")

        # User2 tries to access User1's board; ownership is hidden as 404.
        response = client.get(f"/api/v1/boards/{board_id}", headers=headers2)
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_users_cannot_update_each_others_boards(self, client: TestClient):
        """Test that users can't update boards they don't own."""
        headers1 = self._register_and_login(client, "user1@example.com", "Password123")
        create_response = client.post(
            "/api/v1/boards", json={"title": "User 1 Board"}, headers=headers1
        )
        board_id = create_response.json()["id"]

        headers2 = self._register_and_login(client, "user2@example.com", "Password456")

        # User2 tries to update User1's board
        response = client.patch(
            f"/api/v1/boards/{board_id}", json={"title": "Hacked Title"}, headers=headers2
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

        # Verify original board unchanged
        original = client.get(f"/api/v1/boards/{board_id}", headers=headers1)
        assert original.json()["title"] == "User 1 Board"

    def test_users_cannot_delete_each_others_boards(self, client: TestClient):
        """Test that users can't delete boards they don't own."""
        headers1 = self._register_and_login(client, "user1@example.com", "Password123")
        create_response = client.post(
            "/api/v1/boards", json={"title": "User 1 Board"}, headers=headers1
        )
        board_id = create_response.json()["id"]

        headers2 = self._register_and_login(client, "user2@example.com", "Password456")

        # User2 tries to delete User1's board
        response = client.delete(f"/api/v1/boards/{board_id}", headers=headers2)
        assert response.status_code == status.HTTP_404_NOT_FOUND

        # Board must still exist for its owner.
        still_exists = client.get(f"/api/v1/boards/{board_id}", headers=headers1)
        assert still_exists.status_code == status.HTTP_200_OK
|
||||
|
||||
|
||||
class TestBoardCRUDFlow:
    """Test complete board CRUD flow."""

    def test_complete_board_lifecycle(self, authenticated_client: tuple[TestClient, dict]):
        """Test create → read → update → delete flow."""
        client, headers = authenticated_client

        # CREATE
        created = client.post(
            "/api/v1/boards",
            json={"title": "My Board", "description": "Initial description"},
            headers=headers,
        )
        assert created.status_code == status.HTTP_201_CREATED
        board_id = created.json()["id"]

        # READ
        fetched = client.get(f"/api/v1/boards/{board_id}", headers=headers)
        assert fetched.status_code == status.HTTP_200_OK
        assert fetched.json()["title"] == "My Board"

        # UPDATE
        updated = client.patch(
            f"/api/v1/boards/{board_id}",
            json={"title": "Updated Board", "description": "Updated description"},
            headers=headers,
        )
        assert updated.status_code == status.HTTP_200_OK
        assert updated.json()["title"] == "Updated Board"

        # DELETE
        deleted = client.delete(f"/api/v1/boards/{board_id}", headers=headers)
        assert deleted.status_code == status.HTTP_204_NO_CONTENT

        # VERIFY DELETED
        gone = client.get(f"/api/v1/boards/{board_id}", headers=headers)
        assert gone.status_code == status.HTTP_404_NOT_FOUND

    def test_board_appears_in_list_after_creation(self, authenticated_client: tuple[TestClient, dict]):
        """Test that newly created board appears in list."""
        client, headers = authenticated_client

        # The listing starts out empty.
        assert client.get("/api/v1/boards", headers=headers).json()["total"] == 0

        client.post("/api/v1/boards", json={"title": "New Board"}, headers=headers)

        # The new board shows up immediately.
        listing = client.get("/api/v1/boards", headers=headers).json()
        assert listing["total"] == 1
        assert listing["boards"][0]["title"] == "New Board"

    def test_board_updates_reflect_in_list(self, authenticated_client: tuple[TestClient, dict]):
        """Test that board updates are reflected in the list."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Original"}, headers=headers)
        board_id = created.json()["id"]

        client.patch(f"/api/v1/boards/{board_id}", json={"title": "Updated"}, headers=headers)

        boards = client.get("/api/v1/boards", headers=headers).json()["boards"]
        assert len(boards) == 1
        assert boards[0]["title"] == "Updated"

    def test_viewport_state_persists(self, authenticated_client: tuple[TestClient, dict]):
        """Test that viewport state persists across updates."""
        client, headers = authenticated_client

        created = client.post("/api/v1/boards", json={"title": "Test Board"}, headers=headers)
        board_id = created.json()["id"]

        # Set a non-default viewport ...
        viewport = {"x": 100, "y": 100, "zoom": 2.0, "rotation": 90}
        client.patch(
            f"/api/v1/boards/{board_id}", json={"viewport_state": viewport}, headers=headers
        )

        # ... then touch an unrelated field.
        client.patch(f"/api/v1/boards/{board_id}", json={"title": "New Title"}, headers=headers)

        # The viewport must have survived the second update.
        board = client.get(f"/api/v1/boards/{board_id}", headers=headers).json()
        assert board["title"] == "New Title"
        assert board["viewport_state"]["x"] == 100
        assert board["viewport_state"]["zoom"] == 2.0
|
||||
|
||||
378
backend/tests/api/test_bulk_operations.py
Normal file
378
backend/tests/api/test_bulk_operations.py
Normal file
@@ -0,0 +1,378 @@
|
||||
"""Integration tests for bulk image operations."""
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_position_delta(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test bulk updating positions with delta."""
    # Create a board to hold the images.
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Place three images on the board at staggered positions.
    images = []
    for idx in range(3):
        img = Image(
            id=uuid4(),
            user_id=test_user.id,
            filename=f"test{idx}.jpg",
            storage_path=f"{test_user.id}/test{idx}.jpg",
            file_size=1024,
            mime_type="image/jpeg",
            width=800,
            height=600,
            metadata={"format": "jpeg", "checksum": f"abc{idx}"},
        )
        db.add(img)
        images.append(img)

        db.add(
            BoardImage(
                id=uuid4(),
                board_id=board.id,
                image_id=img.id,
                position={"x": 100 * idx, "y": 100 * idx},
                transformations={
                    "scale": 1.0,
                    "rotation": 0,
                    "opacity": 1.0,
                    "flipped_h": False,
                    "flipped_v": False,
                    "greyscale": False,
                },
                z_order=idx,
            )
        )

    await db.commit()

    # Shift only the first two images by (50, 75).
    response = await client.patch(
        f"/api/images/boards/{board.id}/images/bulk",
        json={
            "image_ids": [str(img.id) for img in images[:2]],
            "position_delta": {"dx": 50, "dy": 75},
        },
    )

    assert response.status_code == 200
    payload = response.json()
    assert payload["updated_count"] == 2
    assert payload["failed_count"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_transformations(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test bulk updating transformations."""
    # Create a board to hold the images.
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Place two images with identity transformations.
    images = []
    for idx in range(2):
        img = Image(
            id=uuid4(),
            user_id=test_user.id,
            filename=f"test{idx}.jpg",
            storage_path=f"{test_user.id}/test{idx}.jpg",
            file_size=1024,
            mime_type="image/jpeg",
            width=800,
            height=600,
            metadata={"format": "jpeg", "checksum": f"abc{idx}"},
        )
        db.add(img)
        images.append(img)

        db.add(
            BoardImage(
                id=uuid4(),
                board_id=board.id,
                image_id=img.id,
                position={"x": 100, "y": 100},
                transformations={
                    "scale": 1.0,
                    "rotation": 0,
                    "opacity": 1.0,
                    "flipped_h": False,
                    "flipped_v": False,
                    "greyscale": False,
                },
                z_order=0,
            )
        )

    await db.commit()

    # Apply one transformation payload to every image at once.
    response = await client.patch(
        f"/api/images/boards/{board.id}/images/bulk",
        json={
            "image_ids": [str(img.id) for img in images],
            "transformations": {
                "scale": 2.0,
                "rotation": 45,
                "opacity": 0.8,
            },
        },
    )

    assert response.status_code == 200
    assert response.json()["updated_count"] == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_z_order_delta(client: AsyncClient, test_user: User, db: AsyncSession):
    """A z_order_delta shifts the stacking order of the selected images only."""
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Identity transformation state shared by every placement.
    neutral = {
        "scale": 1.0,
        "rotation": 0,
        "opacity": 1.0,
        "flipped_h": False,
        "flipped_v": False,
        "greyscale": False,
    }

    images = []
    for idx in range(3):
        img = Image(
            id=uuid4(),
            user_id=test_user.id,
            filename=f"test{idx}.jpg",
            storage_path=f"{test_user.id}/test{idx}.jpg",
            file_size=1024,
            mime_type="image/jpeg",
            width=800,
            height=600,
            metadata={"format": "jpeg", "checksum": f"abc{idx}"},
        )
        placement = BoardImage(
            id=uuid4(),
            board_id=board.id,
            image_id=img.id,
            position={"x": 100, "y": 100},
            transformations=dict(neutral),
            z_order=idx,  # stack them 0, 1, 2
        )
        db.add_all([img, placement])
        images.append(img)

    await db.commit()

    # Shift only the first two images up by 10 stacking levels.
    payload = {
        "image_ids": [str(images[0].id), str(images[1].id)],
        "z_order_delta": 10,
    }
    response = await client.patch(
        f"/api/images/boards/{board.id}/images/bulk",
        json=payload,
    )

    assert response.status_code == 200
    data = response.json()
    assert data["updated_count"] == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_mixed_operations(client: AsyncClient, test_user: User, db: AsyncSession):
    """Position delta, transformations, and z-order delta can be combined in one bulk call."""
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Identity transformation state shared by every placement.
    neutral = {
        "scale": 1.0,
        "rotation": 0,
        "opacity": 1.0,
        "flipped_h": False,
        "flipped_v": False,
        "greyscale": False,
    }

    images = []
    for idx in range(2):
        img = Image(
            id=uuid4(),
            user_id=test_user.id,
            filename=f"test{idx}.jpg",
            storage_path=f"{test_user.id}/test{idx}.jpg",
            file_size=1024,
            mime_type="image/jpeg",
            width=800,
            height=600,
            metadata={"format": "jpeg", "checksum": f"abc{idx}"},
        )
        placement = BoardImage(
            id=uuid4(),
            board_id=board.id,
            image_id=img.id,
            position={"x": 100, "y": 100},
            transformations=dict(neutral),
            z_order=0,
        )
        db.add_all([img, placement])
        images.append(img)

    await db.commit()

    # Exercise all three bulk operations at once.
    payload = {
        "image_ids": [str(img.id) for img in images],
        "position_delta": {"dx": 50, "dy": 50},
        "transformations": {"scale": 2.0},
        "z_order_delta": 5,
    }
    response = await client.patch(
        f"/api/images/boards/{board.id}/images/bulk",
        json=payload,
    )

    assert response.status_code == 200
    data = response.json()
    assert data["updated_count"] == 2
    assert data["failed_count"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_non_existent_image(client: AsyncClient, test_user: User, db: AsyncSession):
    """Unknown image ids are reported as failures; valid ids are still updated."""
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    real_image = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=board.id,
        image_id=real_image.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([board, real_image, placement])
    await db.commit()

    # One id exists on the board, the other is a random UUID.
    payload = {
        "image_ids": [str(real_image.id), str(uuid4())],
        "transformations": {"scale": 2.0},
    }
    response = await client.patch(
        f"/api/images/boards/{board.id}/images/bulk",
        json=payload,
    )

    assert response.status_code == 200
    data = response.json()
    assert data["updated_count"] == 1  # only the real image was touched
    assert data["failed_count"] == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_unauthorized(client: AsyncClient, test_user: User, db: AsyncSession):
    """Bulk updates against somebody else's board are rejected with 403."""
    # A second account that owns the target board.
    stranger = User(id=uuid4(), email="other@example.com", password_hash="hashed")
    foreign_board = Board(
        id=uuid4(),
        user_id=stranger.id,
        title="Other Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add_all([stranger, foreign_board])
    await db.commit()

    # The authenticated client belongs to test_user, not to the board owner.
    response = await client.patch(
        f"/api/images/boards/{foreign_board.id}/images/bulk",
        json={
            "image_ids": [str(uuid4())],
            "transformations": {"scale": 2.0},
        },
    )

    assert response.status_code == 403
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_bulk_update_empty_image_list(client: AsyncClient, test_user: User, db: AsyncSession):
    """An empty image_ids list is accepted and reports zero updates."""
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)
    await db.commit()

    response = await client.patch(
        f"/api/images/boards/{board.id}/images/bulk",
        json={
            "image_ids": [],
            "transformations": {"scale": 2.0},
        },
    )

    # No-op request: succeeds but nothing is updated.
    assert response.status_code == 200
    data = response.json()
    assert data["updated_count"] == 0
|
||||
|
||||
289
backend/tests/api/test_groups.py
Normal file
289
backend/tests/api/test_groups.py
Normal file
@@ -0,0 +1,289 @@
|
||||
"""Integration tests for group endpoints."""
|
||||
|
||||
from uuid import uuid4

import pytest
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Session

from app.database.models.board import Board
from app.database.models.board_image import BoardImage
from app.database.models.image import Image
from app.database.models.user import User
|
||||
|
||||
pytestmark = pytest.mark.asyncio
|
||||
|
||||
|
||||
async def test_create_group(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test creating a group with images.

    Builds a board with three placed images, groups the first two, and
    checks the response echoes the group metadata and member count.
    """
    # Create board
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Create images and place each one on the board
    images = []
    for i in range(3):
        image = Image(
            id=uuid4(),
            user_id=test_user.id,
            filename=f"test{i}.jpg",
            storage_path=f"{test_user.id}/test{i}.jpg",
            file_size=1024,
            mime_type="image/jpeg",
            width=800,
            height=600,
            metadata={"format": "jpeg", "checksum": f"abc{i}"},
        )
        db.add(image)
        images.append(image)

        board_image = BoardImage(
            id=uuid4(),
            board_id=board.id,
            image_id=image.id,
            position={"x": 100, "y": 100},
            transformations={"scale": 1.0, "rotation": 0, "opacity": 1.0},
            z_order=i,
        )
        db.add(board_image)

    # The shared `db` fixture is an async session (see the sibling test
    # modules): the commit must be awaited or it never executes and the
    # API under test would see an empty database.
    await db.commit()

    # Create group from the first two images
    response = await client.post(
        f"/api/boards/{board.id}/groups",
        json={
            "name": "Test Group",
            "color": "#FF5733",
            "annotation": "Group annotation",
            "image_ids": [str(img.id) for img in images[:2]],
        },
    )

    assert response.status_code == 201
    data = response.json()
    assert data["name"] == "Test Group"
    assert data["color"] == "#FF5733"
    assert data["annotation"] == "Group annotation"
    assert data["member_count"] == 2
|
||||
|
||||
|
||||
async def test_list_groups(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test listing groups on a board.

    Seeds three groups and checks the listing returns them newest-first.
    """
    from app.database.models.group import Group

    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Create groups
    for i in range(3):
        group = Group(
            id=uuid4(),
            board_id=board.id,
            name=f"Group {i}",
            color=f"#FF573{i}",
            annotation=f"Annotation {i}",
        )
        db.add(group)

    # Async session: the commit must be awaited (see sibling test modules).
    await db.commit()

    # List groups
    response = await client.get(f"/api/boards/{board.id}/groups")

    assert response.status_code == 200
    data = response.json()
    assert len(data) == 3
    assert data[0]["name"] == "Group 2"  # Most recent first
|
||||
|
||||
|
||||
async def test_get_group(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test getting a specific group by id and verifying its metadata."""
    from app.database.models.group import Group

    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    group = Group(
        id=uuid4(),
        board_id=board.id,
        name="Test Group",
        color="#FF5733",
        annotation="Test annotation",
    )
    db.add(group)
    # Async session: the commit must be awaited (see sibling test modules).
    await db.commit()

    # Get group
    response = await client.get(f"/api/boards/{board.id}/groups/{group.id}")

    assert response.status_code == 200
    data = response.json()
    assert data["name"] == "Test Group"
    assert data["color"] == "#FF5733"
|
||||
|
||||
|
||||
async def test_update_group(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test updating group metadata (name, color, annotation) via PATCH."""
    from app.database.models.group import Group

    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    group = Group(
        id=uuid4(),
        board_id=board.id,
        name="Original Name",
        color="#FF5733",
        annotation="Original annotation",
    )
    db.add(group)
    # Async session: the commit must be awaited (see sibling test modules).
    await db.commit()

    # Update group
    response = await client.patch(
        f"/api/boards/{board.id}/groups/{group.id}",
        json={
            "name": "Updated Name",
            "color": "#00FF00",
            "annotation": "Updated annotation",
        },
    )

    assert response.status_code == 200
    data = response.json()
    assert data["name"] == "Updated Name"
    assert data["color"] == "#00FF00"
    assert data["annotation"] == "Updated annotation"
|
||||
|
||||
|
||||
async def test_delete_group(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test deleting a group.

    Deleting a group must ungroup its member images (group_id set to None)
    rather than delete them.
    """
    from app.database.models.group import Group

    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)

    # Create image
    image = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc"},
    )
    db.add(image)

    # Create group
    group = Group(
        id=uuid4(),
        board_id=board.id,
        name="Test Group",
        color="#FF5733",
    )
    db.add(group)

    # Place the image on the board as a member of the group
    board_image = BoardImage(
        id=uuid4(),
        board_id=board.id,
        image_id=image.id,
        position={"x": 100, "y": 100},
        transformations={"scale": 1.0, "rotation": 0, "opacity": 1.0},
        z_order=0,
        group_id=group.id,
    )
    db.add(board_image)
    # Async session: commit and refresh must be awaited (see sibling modules).
    await db.commit()

    # Delete group
    response = await client.delete(f"/api/boards/{board.id}/groups/{group.id}")

    assert response.status_code == 204

    # Verify the image is ungrouped, not deleted
    await db.refresh(board_image)
    assert board_image.group_id is None
|
||||
|
||||
|
||||
async def test_group_unauthorized_board(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test that users can't create groups on boards they don't own.

    The endpoint answers 404 (not 403) so that board existence is not leaked.
    """
    # Create another user
    other_user = User(id=uuid4(), email="other@example.com", password_hash="hashed")
    db.add(other_user)

    # Create board owned by other user
    board = Board(
        id=uuid4(),
        user_id=other_user.id,
        title="Other Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)
    # Async session: the commit must be awaited (see sibling test modules).
    await db.commit()

    # Try to create group as the authenticated (non-owner) user
    response = await client.post(
        f"/api/boards/{board.id}/groups",
        json={
            "name": "Test Group",
            "color": "#FF5733",
            "image_ids": [str(uuid4())],
        },
    )

    assert response.status_code == 404  # Board not found (for security)
|
||||
|
||||
|
||||
async def test_invalid_color_format(client: AsyncClient, test_user: User, db: AsyncSession):
    """Test that non-hex color strings are rejected with a validation error."""
    board = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    db.add(board)
    # Async session: the commit must be awaited (see sibling test modules).
    await db.commit()

    # Try with invalid color
    response = await client.post(
        f"/api/boards/{board.id}/groups",
        json={
            "name": "Test Group",
            "color": "red",  # Invalid: not hex
            "image_ids": [str(uuid4())],
        },
    )

    assert response.status_code == 422
|
||||
|
||||
221
backend/tests/api/test_image_delete.py
Normal file
221
backend/tests/api/test_image_delete.py
Normal file
@@ -0,0 +1,221 @@
|
||||
"""Integration tests for image deletion endpoints."""
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_remove_image_from_board(client: AsyncClient, test_user: User, db: AsyncSession):
    """Detaching an image from a board returns 204 without deleting the image."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    library_image = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
        reference_count=1,  # referenced by exactly one board
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=library_image.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, library_image, placement])
    await db.commit()

    # Remove the placement from the board (the library image survives).
    response = await client.delete(
        f"/api/images/boards/{canvas.id}/images/{library_image.id}"
    )

    assert response.status_code == 204
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_remove_image_not_on_board(client: AsyncClient, test_user: User, db: AsyncSession):
    """Removing an image that was never placed on the board yields 404."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    # Library image that exists but has no BoardImage row for this board.
    unplaced = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    db.add_all([canvas, unplaced])
    await db.commit()

    response = await client.delete(
        f"/api/images/boards/{canvas.id}/images/{unplaced.id}"
    )

    assert response.status_code == 404
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_remove_image_unauthorized(client: AsyncClient, test_user: User, db: AsyncSession):
    """Removing an image from another user's board is forbidden (403)."""
    # Board, image, and placement all belong to a different account.
    owner = User(id=uuid4(), email="other@example.com", password_hash="hashed")
    foreign_board = Board(
        id=uuid4(),
        user_id=owner.id,
        title="Other Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    foreign_image = Image(
        id=uuid4(),
        user_id=owner.id,
        filename="test.jpg",
        storage_path=f"{owner.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=foreign_board.id,
        image_id=foreign_image.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([owner, foreign_board, foreign_image, placement])
    await db.commit()

    # The authenticated client is test_user, not the owner.
    response = await client.delete(
        f"/api/images/boards/{foreign_board.id}/images/{foreign_image.id}"
    )

    assert response.status_code == 403
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_permanent_delete_image(client: AsyncClient, test_user: User, db: AsyncSession):
    """An unreferenced library image can be permanently deleted (204)."""
    orphan = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
        reference_count=0,  # Not used on any boards
    )
    db.add(orphan)
    await db.commit()

    response = await client.delete(f"/api/images/{orphan.id}")

    assert response.status_code == 204
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_cannot_delete_image_in_use(client: AsyncClient, test_user: User, db: AsyncSession):
    """Permanent deletion is refused (400) while a board still uses the image."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    referenced = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
        reference_count=1,  # Used on a board
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=referenced.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, referenced, placement])
    await db.commit()

    response = await client.delete(f"/api/images/{referenced.id}")

    assert response.status_code == 400
    assert "still used" in response.json()["detail"].lower()
|
||||
|
||||
455
backend/tests/api/test_image_position.py
Normal file
455
backend/tests/api/test_image_position.py
Normal file
@@ -0,0 +1,455 @@
|
||||
"""Integration tests for image position update endpoint."""
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_image_position(client: AsyncClient, test_user: User, db: AsyncSession):
    """Moving an image on a board persists and echoes the new coordinates."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    picture = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=picture.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, picture, placement])
    await db.commit()

    # Move the placement to (200, 250).
    response = await client.patch(
        f"/api/images/boards/{canvas.id}/images/{picture.id}",
        json={"position": {"x": 200, "y": 250}},
    )

    assert response.status_code == 200
    body = response.json()
    assert body["position"]["x"] == 200
    assert body["position"]["y"] == 250
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_image_transformations(client: AsyncClient, test_user: User, db: AsyncSession):
    """Updating transformations persists scale, rotation, opacity, and flags."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    picture = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=picture.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, picture, placement])
    await db.commit()

    # Replace the neutral transformation state with a fully customized one.
    new_transforms = {
        "scale": 1.5,
        "rotation": 45,
        "opacity": 0.8,
        "flipped_h": True,
        "flipped_v": False,
        "greyscale": True,
    }
    response = await client.patch(
        f"/api/images/boards/{canvas.id}/images/{picture.id}",
        json={"transformations": new_transforms},
    )

    assert response.status_code == 200
    body = response.json()
    assert body["transformations"]["scale"] == 1.5
    assert body["transformations"]["rotation"] == 45
    assert body["transformations"]["opacity"] == 0.8
    assert body["transformations"]["flipped_h"] is True
    assert body["transformations"]["greyscale"] is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_image_z_order(client: AsyncClient, test_user: User, db: AsyncSession):
    """Setting an explicit z_order value on a placement persists it."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    picture = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=picture.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, picture, placement])
    await db.commit()

    # Raise the image to stacking level 5.
    response = await client.patch(
        f"/api/images/boards/{canvas.id}/images/{picture.id}",
        json={"z_order": 5},
    )

    assert response.status_code == 200
    body = response.json()
    assert body["z_order"] == 5
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_multiple_fields(client: AsyncClient, test_user: User, db: AsyncSession):
    """Position, transformations, and z_order can all be patched in one request."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    picture = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=picture.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, picture, placement])
    await db.commit()

    # Patch all three mutable fields at once.
    payload = {
        "position": {"x": 300, "y": 400},
        "transformations": {"scale": 2.0, "rotation": 90},
        "z_order": 10,
    }
    response = await client.patch(
        f"/api/images/boards/{canvas.id}/images/{picture.id}",
        json=payload,
    )

    assert response.status_code == 200
    body = response.json()
    assert body["position"]["x"] == 300
    assert body["position"]["y"] == 400
    assert body["transformations"]["scale"] == 2.0
    assert body["transformations"]["rotation"] == 90
    assert body["z_order"] == 10
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_image_not_on_board(client: AsyncClient, test_user: User, db: AsyncSession):
    """Patching an image with no placement on the board yields a 404."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    # Library image with no BoardImage row linking it to the board.
    unplaced = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    db.add_all([canvas, unplaced])
    await db.commit()

    response = await client.patch(
        f"/api/images/boards/{canvas.id}/images/{unplaced.id}",
        json={"position": {"x": 200, "y": 200}},
    )

    assert response.status_code == 404
    assert "not on this board" in response.json()["detail"].lower()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_image_invalid_position(client: AsyncClient, test_user: User, db: AsyncSession):
    """A position payload missing a coordinate is rejected with 422."""
    canvas = Board(
        id=uuid4(),
        user_id=test_user.id,
        title="Test Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    picture = Image(
        id=uuid4(),
        user_id=test_user.id,
        filename="test.jpg",
        storage_path=f"{test_user.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=canvas.id,
        image_id=picture.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([canvas, picture, placement])
    await db.commit()

    # "y" is deliberately omitted from the position payload.
    response = await client.patch(
        f"/api/images/boards/{canvas.id}/images/{picture.id}",
        json={"position": {"x": 200}},
    )

    assert response.status_code == 422
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_update_image_unauthorized(client: AsyncClient, test_user: User, db: AsyncSession):
    """Patching a placement on another user's board is forbidden (403)."""
    # Everything here belongs to a different account.
    owner = User(id=uuid4(), email="other@example.com", password_hash="hashed")
    foreign_board = Board(
        id=uuid4(),
        user_id=owner.id,
        title="Other User's Board",
        viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
    )
    foreign_image = Image(
        id=uuid4(),
        user_id=owner.id,
        filename="test.jpg",
        storage_path=f"{owner.id}/test.jpg",
        file_size=1024,
        mime_type="image/jpeg",
        width=800,
        height=600,
        metadata={"format": "jpeg", "checksum": "abc123"},
    )
    placement = BoardImage(
        id=uuid4(),
        board_id=foreign_board.id,
        image_id=foreign_image.id,
        position={"x": 100, "y": 100},
        transformations={
            "scale": 1.0,
            "rotation": 0,
            "opacity": 1.0,
            "flipped_h": False,
            "flipped_v": False,
            "greyscale": False,
        },
        z_order=0,
    )
    db.add_all([owner, foreign_board, foreign_image, placement])
    await db.commit()

    # The authenticated client is test_user, not the board owner.
    response = await client.patch(
        f"/api/images/boards/{foreign_board.id}/images/{foreign_image.id}",
        json={"position": {"x": 200, "y": 200}},
    )

    assert response.status_code == 403
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_preserves_other_fields(client: AsyncClient, test_user: User, db: AsyncSession):
|
||||
"""Test that updating one field preserves others."""
|
||||
board = Board(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
title="Test Board",
|
||||
viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
|
||||
)
|
||||
db.add(board)
|
||||
|
||||
image = Image(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
filename="test.jpg",
|
||||
storage_path=f"{test_user.id}/test.jpg",
|
||||
file_size=1024,
|
||||
mime_type="image/jpeg",
|
||||
width=800,
|
||||
height=600,
|
||||
metadata={"format": "jpeg", "checksum": "abc123"},
|
||||
)
|
||||
db.add(image)
|
||||
|
||||
board_image = BoardImage(
|
||||
id=uuid4(),
|
||||
board_id=board.id,
|
||||
image_id=image.id,
|
||||
position={"x": 100, "y": 100},
|
||||
transformations={
|
||||
"scale": 1.5,
|
||||
"rotation": 45,
|
||||
"opacity": 0.9,
|
||||
"flipped_h": True,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
z_order=3,
|
||||
)
|
||||
db.add(board_image)
|
||||
await db.commit()
|
||||
|
||||
# Update only position
|
||||
response = await client.patch(
|
||||
f"/api/images/boards/{board.id}/images/{image.id}",
|
||||
json={"position": {"x": 200, "y": 200}},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Position should be updated
|
||||
assert data["position"]["x"] == 200
|
||||
assert data["position"]["y"] == 200
|
||||
|
||||
# Other fields should be preserved
|
||||
assert data["transformations"]["scale"] == 1.5
|
||||
assert data["transformations"]["rotation"] == 45
|
||||
assert data["transformations"]["opacity"] == 0.9
|
||||
assert data["z_order"] == 3
|
||||
|
||||
156
backend/tests/api/test_images.py
Normal file
156
backend/tests/api/test_images.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""Integration tests for image upload endpoints."""
|
||||
|
||||
import io
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from fastapi import status
|
||||
from httpx import AsyncClient
|
||||
from PIL import Image as PILImage
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestImageUpload:
|
||||
"""Tests for image upload endpoint."""
|
||||
|
||||
async def test_upload_image_success(self, client: AsyncClient, auth_headers: dict):
|
||||
"""Test successful image upload."""
|
||||
# Create a test image
|
||||
image = PILImage.new("RGB", (800, 600), color="red")
|
||||
buffer = io.BytesIO()
|
||||
image.save(buffer, format="JPEG")
|
||||
buffer.seek(0)
|
||||
|
||||
# Mock storage and processing
|
||||
with patch("app.images.validation.magic.from_buffer") as mock_magic:
|
||||
mock_magic.return_value = "image/jpeg"
|
||||
|
||||
with patch("app.api.images.upload_image_to_storage") as mock_upload:
|
||||
mock_upload.return_value = ("storage/path.jpg", 800, 600, "image/jpeg")
|
||||
|
||||
with patch("app.api.images.generate_thumbnails") as mock_thumbs:
|
||||
mock_thumbs.return_value = {
|
||||
"low": "thumbs/low.webp",
|
||||
"medium": "thumbs/medium.webp",
|
||||
"high": "thumbs/high.webp",
|
||||
}
|
||||
|
||||
# Upload image
|
||||
response = await client.post(
|
||||
"/api/v1/images/upload",
|
||||
headers=auth_headers,
|
||||
files={"file": ("test.jpg", buffer, "image/jpeg")},
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
data = response.json()
|
||||
assert "id" in data
|
||||
assert data["filename"] == "test.jpg"
|
||||
assert data["width"] == 800
|
||||
assert data["height"] == 600
|
||||
|
||||
async def test_upload_image_unauthenticated(self, client: AsyncClient):
|
||||
"""Test upload without authentication fails."""
|
||||
image = PILImage.new("RGB", (800, 600), color="red")
|
||||
buffer = io.BytesIO()
|
||||
image.save(buffer, format="JPEG")
|
||||
buffer.seek(0)
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/images/upload", files={"file": ("test.jpg", buffer, "image/jpeg")}
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
async def test_upload_invalid_file_type(self, client: AsyncClient, auth_headers: dict):
|
||||
"""Test upload with invalid file type."""
|
||||
# Create a text file disguised as image
|
||||
buffer = io.BytesIO(b"This is not an image")
|
||||
|
||||
with patch("app.images.validation.magic.from_buffer") as mock_magic:
|
||||
mock_magic.return_value = "text/plain"
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/images/upload",
|
||||
headers=auth_headers,
|
||||
files={"file": ("fake.jpg", buffer, "image/jpeg")},
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert "invalid" in response.json()["detail"].lower()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestImageLibrary:
|
||||
"""Tests for image library endpoint."""
|
||||
|
||||
async def test_get_image_library(self, client: AsyncClient, auth_headers: dict):
|
||||
"""Test retrieving user's image library."""
|
||||
response = await client.get("/api/v1/images/library", headers=auth_headers)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()
|
||||
assert "images" in data
|
||||
assert "total" in data
|
||||
assert "page" in data
|
||||
assert isinstance(data["images"], list)
|
||||
|
||||
async def test_get_image_library_pagination(self, client: AsyncClient, auth_headers: dict):
|
||||
"""Test library pagination."""
|
||||
response = await client.get(
|
||||
"/api/v1/images/library", params={"page": 2, "page_size": 10}, headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()
|
||||
assert data["page"] == 2
|
||||
assert data["page_size"] == 10
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestBoardImages:
|
||||
"""Tests for adding images to boards."""
|
||||
|
||||
async def test_add_image_to_board(
|
||||
self, client: AsyncClient, auth_headers: dict, test_board_id: str, test_image_id: str
|
||||
):
|
||||
"""Test adding image to board."""
|
||||
payload = {
|
||||
"image_id": test_image_id,
|
||||
"position": {"x": 100, "y": 200},
|
||||
"transformations": {
|
||||
"scale": 1.0,
|
||||
"rotation": 0,
|
||||
"opacity": 1.0,
|
||||
"flipped_h": False,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
"z_order": 0,
|
||||
}
|
||||
|
||||
response = await client.post(
|
||||
f"/api/v1/images/boards/{test_board_id}/images", headers=auth_headers, json=payload
|
||||
)
|
||||
|
||||
# May fail if test_board_id/test_image_id fixtures aren't set up
|
||||
# This is a placeholder for the structure
|
||||
if response.status_code == status.HTTP_201_CREATED:
|
||||
data = response.json()
|
||||
assert "id" in data
|
||||
assert data["image_id"] == test_image_id
|
||||
assert data["position"]["x"] == 100
|
||||
|
||||
async def test_get_board_images(
|
||||
self, client: AsyncClient, auth_headers: dict, test_board_id: str
|
||||
):
|
||||
"""Test getting all images on a board."""
|
||||
response = await client.get(
|
||||
f"/api/v1/images/boards/{test_board_id}/images", headers=auth_headers
|
||||
)
|
||||
|
||||
# May return 404 if board doesn't exist in test DB
|
||||
if response.status_code == status.HTTP_200_OK:
|
||||
data = response.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
302
backend/tests/api/test_sharing.py
Normal file
302
backend/tests/api/test_sharing.py
Normal file
@@ -0,0 +1,302 @@
|
||||
"""Tests for board sharing endpoints."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import pytest
|
||||
from fastapi import status
|
||||
|
||||
|
||||
def test_create_share_link_view_only(client, auth_headers, test_board):
|
||||
"""Test creating a view-only share link."""
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
data = response.json()
|
||||
assert data["permission_level"] == "view-only"
|
||||
assert data["board_id"] == str(test_board.id)
|
||||
assert data["token"] is not None
|
||||
assert len(data["token"]) == 64
|
||||
assert data["is_revoked"] == False # noqa: E712
|
||||
assert data["access_count"] == 0
|
||||
|
||||
|
||||
def test_create_share_link_view_comment(client, auth_headers, test_board):
|
||||
"""Test creating a view-comment share link."""
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-comment"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
data = response.json()
|
||||
assert data["permission_level"] == "view-comment"
|
||||
|
||||
|
||||
def test_create_share_link_with_expiration(client, auth_headers, test_board):
|
||||
"""Test creating a share link with expiration."""
|
||||
expires_at = (datetime.utcnow() + timedelta(days=7)).isoformat()
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only", "expires_at": expires_at},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
data = response.json()
|
||||
assert data["expires_at"] is not None
|
||||
|
||||
|
||||
def test_create_share_link_invalid_permission(client, auth_headers, test_board):
|
||||
"""Test creating share link with invalid permission level."""
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "invalid-permission"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
|
||||
|
||||
|
||||
def test_create_share_link_unauthorized(client, test_board):
|
||||
"""Test creating share link without authentication."""
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
def test_create_share_link_not_owner(client, other_auth_headers, test_board):
|
||||
"""Test creating share link for board user doesn't own."""
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=other_auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_list_share_links(client, auth_headers, test_board):
|
||||
"""Test listing all share links for a board."""
|
||||
# Create multiple share links
|
||||
client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-comment"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
|
||||
response = client.get(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()
|
||||
assert len(data) >= 2
|
||||
assert all("token" in link for link in data)
|
||||
|
||||
|
||||
def test_list_share_links_unauthorized(client, test_board):
|
||||
"""Test listing share links without authentication."""
|
||||
response = client.get(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
def test_revoke_share_link(client, auth_headers, test_board):
|
||||
"""Test revoking a share link."""
|
||||
# Create a share link
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
link_id = create_response.json()["id"]
|
||||
|
||||
# Revoke it
|
||||
response = client.delete(
|
||||
f"/api/boards/{test_board.id}/share-links/{link_id}",
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
# Verify it's revoked by listing
|
||||
list_response = client.get(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
headers=auth_headers,
|
||||
)
|
||||
revoked_link = next((link for link in list_response.json() if link["id"] == link_id), None)
|
||||
assert revoked_link is not None
|
||||
assert revoked_link["is_revoked"] == True # noqa: E712
|
||||
|
||||
|
||||
def test_revoke_share_link_not_found(client, auth_headers, test_board):
|
||||
"""Test revoking non-existent share link."""
|
||||
import uuid
|
||||
|
||||
fake_id = uuid.uuid4()
|
||||
response = client.delete(
|
||||
f"/api/boards/{test_board.id}/share-links/{fake_id}",
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
def test_access_shared_board(client, auth_headers, test_board):
|
||||
"""Test accessing a board via share link."""
|
||||
# Create share link
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
token = create_response.json()["token"]
|
||||
|
||||
# Access shared board (no auth required)
|
||||
response = client.get(f"/api/shared/{token}")
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()
|
||||
assert data["id"] == str(test_board.id)
|
||||
assert data["title"] == test_board.title
|
||||
|
||||
|
||||
def test_access_shared_board_invalid_token(client):
|
||||
"""Test accessing board with invalid token."""
|
||||
response = client.get("/api/shared/invalid-token-12345")
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
def test_access_shared_board_revoked_token(client, auth_headers, test_board):
|
||||
"""Test accessing board with revoked token."""
|
||||
# Create and revoke share link
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
data = create_response.json()
|
||||
token = data["token"]
|
||||
link_id = data["id"]
|
||||
|
||||
client.delete(
|
||||
f"/api/boards/{test_board.id}/share-links/{link_id}",
|
||||
headers=auth_headers,
|
||||
)
|
||||
|
||||
# Try to access with revoked token
|
||||
response = client.get(f"/api/shared/{token}")
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
def test_create_comment_on_shared_board(client, auth_headers, test_board):
|
||||
"""Test creating a comment via share link with view-comment permission."""
|
||||
# Create view-comment share link
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-comment"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
token = create_response.json()["token"]
|
||||
|
||||
# Create comment (no auth required, just token)
|
||||
comment_data = {
|
||||
"author_name": "Test Viewer",
|
||||
"content": "This is a test comment",
|
||||
"position": {"x": 100, "y": 200},
|
||||
}
|
||||
response = client.post(f"/api/shared/{token}/comments", json=comment_data)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
data = response.json()
|
||||
assert data["author_name"] == "Test Viewer"
|
||||
assert data["content"] == "This is a test comment"
|
||||
assert data["position"]["x"] == 100
|
||||
|
||||
|
||||
def test_create_comment_view_only_permission_denied(client, auth_headers, test_board):
|
||||
"""Test creating comment with view-only permission fails."""
|
||||
# Create view-only share link
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
token = create_response.json()["token"]
|
||||
|
||||
# Try to create comment (should fail)
|
||||
comment_data = {
|
||||
"author_name": "Test Viewer",
|
||||
"content": "This should fail",
|
||||
}
|
||||
response = client.post(f"/api/shared/{token}/comments", json=comment_data)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
def test_list_comments_on_shared_board(client, auth_headers, test_board):
|
||||
"""Test listing comments via share link."""
|
||||
# Create view-comment share link
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-comment"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
token = create_response.json()["token"]
|
||||
|
||||
# Create a comment
|
||||
client.post(
|
||||
f"/api/shared/{token}/comments",
|
||||
json={"author_name": "Viewer 1", "content": "Comment 1"},
|
||||
)
|
||||
|
||||
# List comments
|
||||
response = client.get(f"/api/shared/{token}/comments")
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()
|
||||
assert len(data) >= 1
|
||||
assert data[0]["content"] == "Comment 1"
|
||||
|
||||
|
||||
def test_list_board_comments_as_owner(client, auth_headers, test_board):
|
||||
"""Test board owner listing all comments."""
|
||||
# Create share link and comment
|
||||
create_response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-comment"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
token = create_response.json()["token"]
|
||||
client.post(
|
||||
f"/api/shared/{token}/comments",
|
||||
json={"author_name": "Viewer", "content": "Test comment"},
|
||||
)
|
||||
|
||||
# Owner lists comments
|
||||
response = client.get(
|
||||
f"/api/boards/{test_board.id}/comments",
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()
|
||||
assert len(data) >= 1
|
||||
|
||||
|
||||
def test_token_uniqueness(client, auth_headers, test_board):
|
||||
"""Test that generated tokens are unique."""
|
||||
tokens = set()
|
||||
for _ in range(10):
|
||||
response = client.post(
|
||||
f"/api/boards/{test_board.id}/share-links",
|
||||
json={"permission_level": "view-only"},
|
||||
headers=auth_headers,
|
||||
)
|
||||
token = response.json()["token"]
|
||||
tokens.add(token)
|
||||
|
||||
# All tokens should be unique
|
||||
assert len(tokens) == 10
|
||||
|
||||
299
backend/tests/api/test_z_order.py
Normal file
299
backend/tests/api/test_z_order.py
Normal file
@@ -0,0 +1,299 @@
|
||||
"""Integration tests for Z-order persistence."""
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from httpx import AsyncClient
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.board_image import BoardImage
|
||||
from app.database.models.image import Image
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_z_order(client: AsyncClient, test_user: User, db: AsyncSession):
|
||||
"""Test updating Z-order of an image."""
|
||||
board = Board(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
title="Test Board",
|
||||
viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
|
||||
)
|
||||
db.add(board)
|
||||
|
||||
image = Image(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
filename="test.jpg",
|
||||
storage_path=f"{test_user.id}/test.jpg",
|
||||
file_size=1024,
|
||||
mime_type="image/jpeg",
|
||||
width=800,
|
||||
height=600,
|
||||
metadata={"format": "jpeg", "checksum": "abc123"},
|
||||
)
|
||||
db.add(image)
|
||||
|
||||
board_image = BoardImage(
|
||||
id=uuid4(),
|
||||
board_id=board.id,
|
||||
image_id=image.id,
|
||||
position={"x": 100, "y": 100},
|
||||
transformations={
|
||||
"scale": 1.0,
|
||||
"rotation": 0,
|
||||
"opacity": 1.0,
|
||||
"flipped_h": False,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
z_order=0,
|
||||
)
|
||||
db.add(board_image)
|
||||
await db.commit()
|
||||
|
||||
# Update Z-order
|
||||
response = await client.patch(
|
||||
f"/api/images/boards/{board.id}/images/{image.id}",
|
||||
json={"z_order": 5},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["z_order"] == 5
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_z_order_persists_across_requests(
|
||||
client: AsyncClient, test_user: User, db: AsyncSession
|
||||
):
|
||||
"""Test that Z-order changes persist."""
|
||||
board = Board(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
title="Test Board",
|
||||
viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
|
||||
)
|
||||
db.add(board)
|
||||
|
||||
image = Image(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
filename="test.jpg",
|
||||
storage_path=f"{test_user.id}/test.jpg",
|
||||
file_size=1024,
|
||||
mime_type="image/jpeg",
|
||||
width=800,
|
||||
height=600,
|
||||
metadata={"format": "jpeg", "checksum": "abc123"},
|
||||
)
|
||||
db.add(image)
|
||||
|
||||
board_image = BoardImage(
|
||||
id=uuid4(),
|
||||
board_id=board.id,
|
||||
image_id=image.id,
|
||||
position={"x": 100, "y": 100},
|
||||
transformations={
|
||||
"scale": 1.0,
|
||||
"rotation": 0,
|
||||
"opacity": 1.0,
|
||||
"flipped_h": False,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
z_order=0,
|
||||
)
|
||||
db.add(board_image)
|
||||
await db.commit()
|
||||
|
||||
# Update Z-order
|
||||
await client.patch(
|
||||
f"/api/images/boards/{board.id}/images/{image.id}",
|
||||
json={"z_order": 10},
|
||||
)
|
||||
|
||||
# Fetch board images to verify persistence
|
||||
response = await client.get(f"/api/images/boards/{board.id}/images")
|
||||
|
||||
assert response.status_code == 200
|
||||
board_images = response.json()
|
||||
assert len(board_images) == 1
|
||||
assert board_images[0]["z_order"] == 10
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_multiple_images_z_order(client: AsyncClient, test_user: User, db: AsyncSession):
|
||||
"""Test Z-order with multiple images."""
|
||||
board = Board(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
title="Test Board",
|
||||
viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
|
||||
)
|
||||
db.add(board)
|
||||
|
||||
images = []
|
||||
for i in range(3):
|
||||
image = Image(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
filename=f"test{i}.jpg",
|
||||
storage_path=f"{test_user.id}/test{i}.jpg",
|
||||
file_size=1024,
|
||||
mime_type="image/jpeg",
|
||||
width=800,
|
||||
height=600,
|
||||
metadata={"format": "jpeg", "checksum": f"abc{i}"},
|
||||
)
|
||||
db.add(image)
|
||||
images.append(image)
|
||||
|
||||
board_image = BoardImage(
|
||||
id=uuid4(),
|
||||
board_id=board.id,
|
||||
image_id=image.id,
|
||||
position={"x": 100, "y": 100},
|
||||
transformations={
|
||||
"scale": 1.0,
|
||||
"rotation": 0,
|
||||
"opacity": 1.0,
|
||||
"flipped_h": False,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
z_order=i,
|
||||
)
|
||||
db.add(board_image)
|
||||
|
||||
await db.commit()
|
||||
|
||||
# Update Z-order of middle image to be highest
|
||||
await client.patch(
|
||||
f"/api/images/boards/{board.id}/images/{images[1].id}",
|
||||
json={"z_order": 10},
|
||||
)
|
||||
|
||||
# Verify
|
||||
response = await client.get(f"/api/images/boards/{board.id}/images")
|
||||
board_images = response.json()
|
||||
|
||||
# Find the updated image
|
||||
updated = next((bi for bi in board_images if str(bi["image_id"]) == str(images[1].id)), None)
|
||||
assert updated is not None
|
||||
assert updated["z_order"] == 10
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_z_order_negative_value(client: AsyncClient, test_user: User, db: AsyncSession):
|
||||
"""Test that negative Z-order is allowed (for layering below 0)."""
|
||||
board = Board(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
title="Test Board",
|
||||
viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
|
||||
)
|
||||
db.add(board)
|
||||
|
||||
image = Image(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
filename="test.jpg",
|
||||
storage_path=f"{test_user.id}/test.jpg",
|
||||
file_size=1024,
|
||||
mime_type="image/jpeg",
|
||||
width=800,
|
||||
height=600,
|
||||
metadata={"format": "jpeg", "checksum": "abc123"},
|
||||
)
|
||||
db.add(image)
|
||||
|
||||
board_image = BoardImage(
|
||||
id=uuid4(),
|
||||
board_id=board.id,
|
||||
image_id=image.id,
|
||||
position={"x": 100, "y": 100},
|
||||
transformations={
|
||||
"scale": 1.0,
|
||||
"rotation": 0,
|
||||
"opacity": 1.0,
|
||||
"flipped_h": False,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
z_order=0,
|
||||
)
|
||||
db.add(board_image)
|
||||
await db.commit()
|
||||
|
||||
# Set negative Z-order
|
||||
response = await client.patch(
|
||||
f"/api/images/boards/{board.id}/images/{image.id}",
|
||||
json={"z_order": -1},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["z_order"] == -1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_z_order_with_other_updates(client: AsyncClient, test_user: User, db: AsyncSession):
|
||||
"""Test updating Z-order along with position and transformations."""
|
||||
board = Board(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
title="Test Board",
|
||||
viewport_state={"x": 0, "y": 0, "zoom": 1.0, "rotation": 0},
|
||||
)
|
||||
db.add(board)
|
||||
|
||||
image = Image(
|
||||
id=uuid4(),
|
||||
user_id=test_user.id,
|
||||
filename="test.jpg",
|
||||
storage_path=f"{test_user.id}/test.jpg",
|
||||
file_size=1024,
|
||||
mime_type="image/jpeg",
|
||||
width=800,
|
||||
height=600,
|
||||
metadata={"format": "jpeg", "checksum": "abc123"},
|
||||
)
|
||||
db.add(image)
|
||||
|
||||
board_image = BoardImage(
|
||||
id=uuid4(),
|
||||
board_id=board.id,
|
||||
image_id=image.id,
|
||||
position={"x": 100, "y": 100},
|
||||
transformations={
|
||||
"scale": 1.0,
|
||||
"rotation": 0,
|
||||
"opacity": 1.0,
|
||||
"flipped_h": False,
|
||||
"flipped_v": False,
|
||||
"greyscale": False,
|
||||
},
|
||||
z_order=0,
|
||||
)
|
||||
db.add(board_image)
|
||||
await db.commit()
|
||||
|
||||
# Update everything including Z-order
|
||||
response = await client.patch(
|
||||
f"/api/images/boards/{board.id}/images/{image.id}",
|
||||
json={
|
||||
"position": {"x": 200, "y": 200},
|
||||
"transformations": {"scale": 2.0},
|
||||
"z_order": 15,
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["position"]["x"] == 200
|
||||
assert data["transformations"]["scale"] == 2.0
|
||||
assert data["z_order"] == 15
|
||||
|
||||
2
backend/tests/auth/__init__.py
Normal file
2
backend/tests/auth/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
"""Auth module tests."""
|
||||
|
||||
314
backend/tests/auth/test_jwt.py
Normal file
314
backend/tests/auth/test_jwt.py
Normal file
@@ -0,0 +1,314 @@
|
||||
"""Unit tests for JWT token generation and validation."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from jose import jwt
|
||||
|
||||
from app.auth.jwt import create_access_token, decode_access_token
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
class TestCreateAccessToken:
|
||||
"""Test JWT access token creation."""
|
||||
|
||||
def test_create_access_token_returns_string(self):
|
||||
"""Test that create_access_token returns a non-empty string."""
|
||||
user_id = uuid4()
|
||||
email = "test@example.com"
|
||||
|
||||
token = create_access_token(user_id, email)
|
||||
|
||||
assert isinstance(token, str)
|
||||
assert len(token) > 0
|
||||
|
||||
def test_create_access_token_contains_user_data(self):
|
||||
"""Test that token contains user ID and email."""
|
||||
user_id = uuid4()
|
||||
email = "test@example.com"
|
||||
|
||||
token = create_access_token(user_id, email)
|
||||
|
||||
# Decode without verification to inspect payload
|
||||
payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
|
||||
|
||||
assert payload["sub"] == str(user_id)
|
||||
assert payload["email"] == email
|
||||
|
||||
def test_create_access_token_contains_required_claims(self):
|
||||
"""Test that token contains all required JWT claims."""
|
||||
user_id = uuid4()
|
||||
email = "test@example.com"
|
||||
|
||||
token = create_access_token(user_id, email)
|
||||
|
||||
payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
|
||||
|
||||
# Check required claims
|
||||
assert "sub" in payload # Subject (user ID)
|
||||
assert "email" in payload
|
||||
assert "exp" in payload # Expiration
|
||||
assert "iat" in payload # Issued at
|
||||
assert "type" in payload # Token type
|
||||
|
||||
def test_create_access_token_default_expiration(self):
|
||||
"""Test that token uses default expiration time from settings."""
|
||||
user_id = uuid4()
|
||||
email = "test@example.com"
|
||||
|
||||
before = datetime.utcnow()
|
||||
token = create_access_token(user_id, email)
|
||||
after = datetime.utcnow()
|
||||
|
||||
payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
|
||||
exp_timestamp = payload["exp"]
|
||||
exp_datetime = datetime.fromtimestamp(exp_timestamp)
|
||||
|
||||
# Calculate expected expiration range
|
||||
min_exp = before + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
max_exp = after + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||
|
||||
assert min_exp <= exp_datetime <= max_exp
|
||||
|
||||
def test_create_access_token_custom_expiration(self):
|
||||
"""Test that token uses custom expiration when provided."""
|
||||
user_id = uuid4()
|
||||
email = "test@example.com"
|
||||
custom_delta = timedelta(hours=2)
|
||||
|
||||
before = datetime.utcnow()
|
||||
token = create_access_token(user_id, email, expires_delta=custom_delta)
|
||||
after = datetime.utcnow()
|
||||
|
||||
payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
|
||||
exp_timestamp = payload["exp"]
|
||||
exp_datetime = datetime.fromtimestamp(exp_timestamp)
|
||||
|
||||
min_exp = before + custom_delta
|
||||
max_exp = after + custom_delta
|
||||
|
||||
assert min_exp <= exp_datetime <= max_exp
|
||||
|
||||
def test_create_access_token_type_is_access(self):
    """A freshly minted token carries the literal token type 'access'."""
    uid = uuid4()
    addr = "test@example.com"

    decoded = jwt.decode(
        create_access_token(uid, addr),
        settings.SECRET_KEY,
        algorithms=[settings.ALGORITHM],
    )

    assert decoded["type"] == "access"
def test_create_access_token_different_users_different_tokens(self):
    """Tokens minted for two distinct users must not collide."""
    user1_id, user2_id = uuid4(), uuid4()
    email1 = "user1@example.com"
    email2 = "user2@example.com"

    first = create_access_token(user1_id, email1)
    second = create_access_token(user2_id, email2)

    assert first != second
def test_create_access_token_same_user_different_tokens(self):
    """Re-issuing a token for the same user later yields a new token.

    The iat (issued-at) claim advances between calls, so the encoded
    strings must differ.
    """
    import time

    uid = uuid4()
    addr = "test@example.com"

    earlier = create_access_token(uid, addr)
    time.sleep(0.01)  # let the clock tick so iat changes
    later = create_access_token(uid, addr)

    assert earlier != later
class TestDecodeAccessToken:
    """Test JWT access token decoding and validation.

    decode_access_token must return the claims dict for a valid token and
    None (never raise) for anything invalid: malformed strings, wrong
    signing key, expired tokens, or a mismatched algorithm.
    """

    def test_decode_access_token_valid_token(self):
        """A token produced by create_access_token round-trips its claims."""
        user_id = uuid4()
        email = "test@example.com"

        token = create_access_token(user_id, email)
        payload = decode_access_token(token)

        assert payload is not None
        # "sub" is serialized as a string (JWT claims are JSON scalars).
        assert payload["sub"] == str(user_id)
        assert payload["email"] == email

    def test_decode_access_token_invalid_token(self):
        """Structurally invalid tokens return None instead of raising."""
        invalid_tokens = [
            "invalid.token.here",
            "not_a_jwt",
            "",
            "a.b.c.d.e",  # Too many parts
        ]

        for token in invalid_tokens:
            payload = decode_access_token(token)
            assert payload is None

    def test_decode_access_token_wrong_secret(self):
        """A token signed with a different secret must be rejected."""
        user_id = uuid4()
        email = "test@example.com"

        # Forge a token with a key other than settings.SECRET_KEY.
        wrong_payload = {"sub": str(user_id), "email": email, "exp": datetime.utcnow() + timedelta(minutes=30)}
        wrong_token = jwt.encode(wrong_payload, "wrong_secret_key", algorithm=settings.ALGORITHM)

        payload = decode_access_token(wrong_token)
        assert payload is None

    def test_decode_access_token_expired_token(self):
        """A token whose exp is in the past must decode to None."""
        user_id = uuid4()
        email = "test@example.com"

        # Negative delta => token expired one hour before it was created.
        expired_delta = timedelta(hours=-1)
        token = create_access_token(user_id, email, expires_delta=expired_delta)

        payload = decode_access_token(token)
        assert payload is None

    def test_decode_access_token_wrong_algorithm(self):
        """A token signed with a non-configured algorithm must be rejected."""
        user_id = uuid4()
        email = "test@example.com"

        wrong_payload = {
            "sub": str(user_id),
            "email": email,
            "exp": datetime.utcnow() + timedelta(minutes=30),
        }
        # Sign with HS512 while the app validates only settings.ALGORITHM.
        wrong_token = jwt.encode(wrong_payload, settings.SECRET_KEY, algorithm="HS512")

        payload = decode_access_token(wrong_token)
        assert payload is None

    def test_decode_access_token_missing_required_claims(self):
        """Decoding a token without an exp claim must not raise.

        Whether jose accepts or rejects a missing ``exp`` depends on its
        validation options/version, so only the no-crash contract plus a
        round-trip of the claims (when decoding succeeds) is pinned here.
        """
        payload_no_exp = {"sub": str(uuid4()), "email": "test@example.com"}
        token_no_exp = jwt.encode(payload_no_exp, settings.SECRET_KEY, algorithm=settings.ALGORITHM)

        payload = decode_access_token(token_no_exp)
        # Bug fix: the previous assertion (`payload is not None or payload
        # is None`) was a tautology that could never fail. Assert something
        # meaningful instead: either the token is rejected, or the decoded
        # claims match what was encoded.
        assert payload is None or payload["sub"] == payload_no_exp["sub"]

    def test_decode_access_token_preserves_all_claims(self):
        """Every claim written by create_access_token survives decoding."""
        user_id = uuid4()
        email = "test@example.com"

        token = create_access_token(user_id, email)
        payload = decode_access_token(token)

        assert payload is not None
        assert "sub" in payload
        assert "email" in payload
        assert "exp" in payload
        assert "iat" in payload
        assert "type" in payload
        assert payload["type"] == "access"
class TestJWTSecurityProperties:
    """Test security properties of JWT implementation."""

    def test_jwt_token_is_url_safe(self):
        """Every character of an encoded token is URL-safe (base64url + dots)."""
        import string

        token = create_access_token(uuid4(), "test@example.com")

        allowed = set(string.ascii_letters + string.digits + "-_.")
        assert all(ch in allowed for ch in token)

    def test_jwt_token_cannot_be_tampered(self):
        """Altering any part of the encoded token invalidates the signature."""
        token = create_access_token(uuid4(), "test@example.com")

        # Corrupt the tail of the signature segment.
        forged = token[:-5] + "XXXXX"

        assert decode_access_token(forged) is None

    def test_jwt_user_id_is_string_uuid(self):
        """The sub claim is a string that parses back to the original UUID."""
        uid = uuid4()

        claims = decode_access_token(create_access_token(uid, "test@example.com"))

        assert claims is not None
        sub = claims["sub"]
        assert isinstance(sub, str)
        # Round-trip: the string form must reconstruct the same UUID.
        assert UUID(sub) == uid

    def test_jwt_email_preserved_correctly(self):
        """Email addresses survive encoding with casing and format intact."""
        uid = uuid4()
        test_emails = [
            "test@example.com",
            "Test.User@Example.COM",
            "user+tag@domain.co.uk",
            "first.last@sub.domain.org",
        ]

        for address in test_emails:
            claims = decode_access_token(create_access_token(uid, address))
            assert claims is not None
            assert claims["email"] == address

    def test_jwt_expiration_is_timestamp(self):
        """The exp claim is a numeric Unix timestamp in a plausible range."""
        claims = decode_access_token(create_access_token(uuid4(), "test@example.com"))

        assert claims is not None
        exp = claims["exp"]
        assert isinstance(exp, (int, float))
        # Sanity window: between 2020-01-01 and 2030-01-01 (Unix seconds).
        assert 1577836800 < exp < 1893456000

    def test_jwt_iat_before_exp(self):
        """A token is always issued strictly before it expires."""
        claims = decode_access_token(create_access_token(uuid4(), "test@example.com"))

        assert claims is not None
        assert claims["iat"] < claims["exp"]
234
backend/tests/auth/test_security.py
Normal file
234
backend/tests/auth/test_security.py
Normal file
@@ -0,0 +1,234 @@
|
||||
"""Unit tests for password hashing and validation."""
|
||||
|
||||
|
||||
from app.auth.security import hash_password, validate_password_strength, verify_password
|
||||
|
||||
|
||||
class TestPasswordHashing:
    """Test password hashing functionality."""

    def test_hash_password_returns_string(self):
        """Hashing yields a non-empty string that differs from the input."""
        plain = "TestPassword123"
        digest = hash_password(plain)

        assert isinstance(digest, str)
        assert len(digest) > 0
        # The plaintext must never come back verbatim.
        assert digest != plain

    def test_hash_password_generates_unique_hashes(self):
        """Hashing the same password twice gives two hashes (random salt)."""
        plain = "TestPassword123"

        first = hash_password(plain)
        second = hash_password(plain)

        assert first != second  # Different salts

    def test_hash_password_with_special_characters(self):
        """Punctuation-heavy passwords hash without error."""
        digest = hash_password("P@ssw0rd!#$%")

        assert isinstance(digest, str)
        assert len(digest) > 0

    def test_hash_password_with_unicode(self):
        """Non-ASCII passwords hash without error."""
        digest = hash_password("Pässwörd123")

        assert isinstance(digest, str)
        assert len(digest) > 0
class TestPasswordVerification:
    """Test password verification functionality."""

    def test_verify_password_correct_password(self):
        """The original plaintext verifies against its own hash."""
        plain = "TestPassword123"
        digest = hash_password(plain)

        assert verify_password(plain, digest) is True

    def test_verify_password_incorrect_password(self):
        """A different plaintext is rejected."""
        digest = hash_password("TestPassword123")

        assert verify_password("WrongPassword123", digest) is False

    def test_verify_password_case_sensitive(self):
        """Case variants of the correct password are rejected."""
        digest = hash_password("TestPassword123")

        for variant in ("testpassword123", "TESTPASSWORD123"):
            assert verify_password(variant, digest) is False

    def test_verify_password_empty_string(self):
        """The empty string never verifies against a real hash."""
        digest = hash_password("TestPassword123")

        assert verify_password("", digest) is False

    def test_verify_password_with_special_characters(self):
        """Punctuation-heavy passwords verify exactly, not approximately."""
        plain = "P@ssw0rd!#$%"
        digest = hash_password(plain)

        assert verify_password(plain, digest) is True
        assert verify_password("P@ssw0rd!#$", digest) is False  # Missing last char

    def test_verify_password_invalid_hash_format(self):
        """Garbage in the hash slot yields False rather than an exception."""
        plain = "TestPassword123"

        assert verify_password(plain, "invalid_hash") is False
        assert verify_password(plain, "") is False
class TestPasswordStrengthValidation:
    """Test password strength validation.

    validate_password_strength returns an (is_valid, error_message) pair;
    a valid password yields (True, "").
    """

    def test_validate_password_valid_password(self):
        """Passwords meeting length + case + digit rules pass."""
        valid_passwords = [
            "Password123",
            "Abcdef123",
            "SecureP@ss1",
            "MyP4ssword",
        ]

        for password in valid_passwords:
            is_valid, error = validate_password_strength(password)
            assert is_valid is True, f"Password '{password}' should be valid"
            assert error == ""

    def test_validate_password_too_short(self):
        """Anything under 8 characters is rejected with a length message."""
        for password in ("Pass1", "Abc123", "Short1A"):
            is_valid, error = validate_password_strength(password)
            assert is_valid is False
            assert "at least 8 characters" in error

    def test_validate_password_no_uppercase(self):
        """All-lowercase passwords are rejected with an uppercase message."""
        for password in ("password123", "mypassword1", "lowercase8"):
            is_valid, error = validate_password_strength(password)
            assert is_valid is False
            assert "uppercase letter" in error

    def test_validate_password_no_lowercase(self):
        """All-uppercase passwords are rejected with a lowercase message."""
        for password in ("PASSWORD123", "MYPASSWORD1", "UPPERCASE8"):
            is_valid, error = validate_password_strength(password)
            assert is_valid is False
            assert "lowercase letter" in error

    def test_validate_password_no_number(self):
        """Digit-free passwords are rejected with a number message."""
        for password in ("Password", "MyPassword", "NoNumbers"):
            is_valid, error = validate_password_strength(password)
            assert is_valid is False
            assert "one number" in error

    def test_validate_password_edge_cases(self):
        """Boundary inputs: minimum length, very long, and empty."""
        # Exactly 8 characters with all character classes present.
        is_valid, error = validate_password_strength("Abcdef12")
        assert is_valid is True
        assert error == ""

        # Length alone never disqualifies a long password.
        is_valid, error = validate_password_strength("A" * 100 + "a1")
        assert is_valid is True

        # The empty string fails outright.
        is_valid, error = validate_password_strength("")
        assert is_valid is False

    def test_validate_password_with_special_chars(self):
        """Symbols neither satisfy nor break the core requirements."""
        passwords_with_special = [
            "P@ssw0rd!",
            "MyP@ss123",
            "Test#Pass1",
        ]

        for password in passwords_with_special:
            is_valid, error = validate_password_strength(password)
            assert is_valid is True, f"Password '{password}' should be valid"
            assert error == ""
class TestPasswordSecurityProperties:
    """Test security properties of password handling."""

    def test_hashed_password_not_reversible(self):
        """The plaintext never appears inside its own hash."""
        plain = "TestPassword123"
        digest = hash_password(plain)

        assert plain not in digest
        # Case-insensitive containment check as well.
        assert plain.lower() not in digest.lower()

    def test_different_passwords_different_hashes(self):
        """Even a one-character change yields a different hash."""
        digest_a = hash_password("TestPassword123")
        digest_b = hash_password("TestPassword124")  # Only last char different

        assert digest_a != digest_b

    def test_hashed_password_length_consistent(self):
        """bcrypt output is always 60 characters, regardless of input size."""
        samples = ["Short1A", "MediumPassword123", "VeryLongPasswordWithLotsOfCharacters123"]

        for digest in (hash_password(p) for p in samples):
            assert len(digest) == 60

    def test_verify_handles_timing_attack_resistant(self):
        """Document bcrypt's constant-time comparison property.

        No timing is actually measured; this test records that both the
        match and mismatch paths are exercised without error.
        """
        plain = "TestPassword123"
        digest = hash_password(plain)

        verify_password("WrongPassword123", digest)
        verify_password(plain, digest)

        assert True
2
backend/tests/boards/__init__.py
Normal file
2
backend/tests/boards/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
"""Board module tests."""
|
||||
|
||||
442
backend/tests/boards/test_repository.py
Normal file
442
backend/tests/boards/test_repository.py
Normal file
@@ -0,0 +1,442 @@
|
||||
"""Unit tests for board repository."""
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.boards.repository import BoardRepository
|
||||
from app.database.models.board import Board
|
||||
from app.database.models.user import User
|
||||
|
||||
|
||||
@pytest.fixture
def test_user(db: Session) -> User:
    """Persist and return a fresh user for board-ownership tests."""
    user = User(email="test@example.com", password_hash="hashed_password")
    db.add(user)
    db.commit()
    # Reload so generated columns (id, timestamps) are populated.
    db.refresh(user)
    return user
@pytest.fixture
def board_repo(db: Session) -> BoardRepository:
    """Return a BoardRepository bound to the test database session."""
    return BoardRepository(db)
class TestCreateBoard:
    """Test board creation."""

    def test_create_board_minimal(self, board_repo: BoardRepository, test_user: User):
        """A board created with only a title gets sensible defaults."""
        created = board_repo.create_board(user_id=test_user.id, title="Test Board")

        assert created.id is not None
        assert created.user_id == test_user.id
        assert created.title == "Test Board"
        assert created.description is None
        assert created.is_deleted is False
        # Timestamps are populated on insert.
        assert created.created_at is not None
        assert created.updated_at is not None

    def test_create_board_with_description(self, board_repo: BoardRepository, test_user: User):
        """An explicit description is stored verbatim."""
        created = board_repo.create_board(
            user_id=test_user.id, title="Test Board", description="This is a test description"
        )

        assert created.description == "This is a test description"

    def test_create_board_default_viewport(self, board_repo: BoardRepository, test_user: User):
        """Omitting viewport_state yields the origin/unzoomed default."""
        created = board_repo.create_board(user_id=test_user.id, title="Test Board")

        state = created.viewport_state
        assert state is not None
        assert state["x"] == 0
        assert state["y"] == 0
        assert state["zoom"] == 1.0
        assert state["rotation"] == 0

    def test_create_board_custom_viewport(self, board_repo: BoardRepository, test_user: User):
        """A caller-supplied viewport_state is persisted unchanged."""
        custom_viewport = {"x": 100, "y": 200, "zoom": 2.0, "rotation": 45}

        created = board_repo.create_board(
            user_id=test_user.id, title="Test Board", viewport_state=custom_viewport
        )

        assert created.viewport_state == custom_viewport

    def test_create_multiple_boards(self, board_repo: BoardRepository, test_user: User):
        """Several boards for one user get distinct ids but the same owner."""
        boards = [
            board_repo.create_board(user_id=test_user.id, title="Board 1"),
            board_repo.create_board(user_id=test_user.id, title="Board 2"),
            board_repo.create_board(user_id=test_user.id, title="Board 3"),
        ]

        assert len({b.id for b in boards}) == 3
        assert all(b.user_id == test_user.id for b in boards)
class TestGetBoardById:
    """Test retrieving board by ID."""

    def test_get_existing_board(self, board_repo: BoardRepository, test_user: User):
        """The owner can fetch a board they created."""
        created = board_repo.create_board(user_id=test_user.id, title="Test Board")

        fetched = board_repo.get_board_by_id(board_id=created.id, user_id=test_user.id)

        assert fetched is not None
        assert fetched.id == created.id
        assert fetched.title == created.title

    def test_get_nonexistent_board(self, board_repo: BoardRepository, test_user: User):
        """An unknown board id yields None."""
        missing_id = uuid4()

        assert board_repo.get_board_by_id(board_id=missing_id, user_id=test_user.id) is None

    def test_get_board_wrong_owner(self, board_repo: BoardRepository, test_user: User, db: Session):
        """A board is invisible to users who do not own it."""
        # A second, unrelated account.
        other_user = User(email="other@example.com", password_hash="hashed")
        db.add(other_user)
        db.commit()
        db.refresh(other_user)

        owned = board_repo.create_board(user_id=test_user.id, title="Test Board")

        # The other account must not see test_user's board.
        assert board_repo.get_board_by_id(board_id=owned.id, user_id=other_user.id) is None

    def test_get_deleted_board(self, board_repo: BoardRepository, test_user: User):
        """Soft-deleted boards behave as if they do not exist."""
        doomed = board_repo.create_board(user_id=test_user.id, title="Test Board")

        board_repo.delete_board(board_id=doomed.id, user_id=test_user.id)

        assert board_repo.get_board_by_id(board_id=doomed.id, user_id=test_user.id) is None
class TestGetUserBoards:
    """Test listing user's boards."""

    def test_get_user_boards_empty(self, board_repo: BoardRepository, test_user: User):
        """A user with no boards gets an empty page and zero total."""
        boards, total = board_repo.get_user_boards(user_id=test_user.id)

        assert boards == []
        assert total == 0

    def test_get_user_boards_multiple(self, board_repo: BoardRepository, test_user: User):
        """All of a user's boards are returned when no paging is applied."""
        created = [
            board_repo.create_board(user_id=test_user.id, title="Board 1"),
            board_repo.create_board(user_id=test_user.id, title="Board 2"),
            board_repo.create_board(user_id=test_user.id, title="Board 3"),
        ]

        boards, total = board_repo.get_user_boards(user_id=test_user.id)

        assert len(boards) == 3
        assert total == 3
        assert {b.id for b in boards} == {b.id for b in created}

    def test_get_user_boards_pagination(self, board_repo: BoardRepository, test_user: User):
        """limit/offset slice the listing while total reports all rows."""
        for i in range(5):
            board_repo.create_board(user_id=test_user.id, title=f"Board {i}")

        page1, total = board_repo.get_user_boards(user_id=test_user.id, limit=2, offset=0)
        assert len(page1) == 2
        assert total == 5

        page2, total = board_repo.get_user_boards(user_id=test_user.id, limit=2, offset=2)
        assert len(page2) == 2
        assert total == 5

        # Consecutive pages must not share rows.
        assert {b.id for b in page1}.isdisjoint({b.id for b in page2})

    def test_get_user_boards_sorted_by_update(self, board_repo: BoardRepository, test_user: User):
        """Listing order is updated_at descending (newest first)."""
        oldest = board_repo.create_board(user_id=test_user.id, title="Oldest")
        middle = board_repo.create_board(user_id=test_user.id, title="Middle")
        newest = board_repo.create_board(user_id=test_user.id, title="Newest")

        listing, _ = board_repo.get_user_boards(user_id=test_user.id)

        assert [b.id for b in listing] == [newest.id, middle.id, oldest.id]

    def test_get_user_boards_excludes_deleted(self, board_repo: BoardRepository, test_user: User):
        """Soft-deleted boards are absent from listings and the total."""
        kept_a = board_repo.create_board(user_id=test_user.id, title="Board 1")
        removed = board_repo.create_board(user_id=test_user.id, title="Board 2")
        kept_b = board_repo.create_board(user_id=test_user.id, title="Board 3")

        board_repo.delete_board(board_id=removed.id, user_id=test_user.id)

        boards, total = board_repo.get_user_boards(user_id=test_user.id)

        assert len(boards) == 2
        assert total == 2
        assert {b.id for b in boards} == {kept_a.id, kept_b.id}

    def test_get_user_boards_isolation(self, board_repo: BoardRepository, test_user: User, db: Session):
        """Each user's listing contains only their own boards."""
        other_user = User(email="other@example.com", password_hash="hashed")
        db.add(other_user)
        db.commit()
        db.refresh(other_user)

        mine = board_repo.create_board(user_id=test_user.id, title="Test Board")
        theirs = board_repo.create_board(user_id=other_user.id, title="Other Board")

        my_boards, _ = board_repo.get_user_boards(user_id=test_user.id)
        assert len(my_boards) == 1
        assert my_boards[0].id == mine.id

        their_boards, _ = board_repo.get_user_boards(user_id=other_user.id)
        assert len(their_boards) == 1
        assert their_boards[0].id == theirs.id
class TestUpdateBoard:
    """Test board updates."""

    def test_update_board_title(self, board_repo: BoardRepository, test_user: User):
        """Updating the title changes it in place on the same board."""
        original = board_repo.create_board(user_id=test_user.id, title="Original Title")

        changed = board_repo.update_board(
            board_id=original.id, user_id=test_user.id, title="Updated Title"
        )

        assert changed is not None
        assert changed.title == "Updated Title"
        assert changed.id == original.id

    def test_update_board_description(self, board_repo: BoardRepository, test_user: User):
        """The description field can be set via update."""
        target = board_repo.create_board(user_id=test_user.id, title="Test Board")

        changed = board_repo.update_board(
            board_id=target.id, user_id=test_user.id, description="New description"
        )

        assert changed is not None
        assert changed.description == "New description"

    def test_update_board_viewport(self, board_repo: BoardRepository, test_user: User):
        """The viewport state can be replaced wholesale."""
        target = board_repo.create_board(user_id=test_user.id, title="Test Board")
        replacement = {"x": 100, "y": 200, "zoom": 1.5, "rotation": 90}

        changed = board_repo.update_board(
            board_id=target.id, user_id=test_user.id, viewport_state=replacement
        )

        assert changed is not None
        assert changed.viewport_state == replacement

    def test_update_multiple_fields(self, board_repo: BoardRepository, test_user: User):
        """Title, description, and viewport can all change in one call."""
        target = board_repo.create_board(user_id=test_user.id, title="Original")

        changed = board_repo.update_board(
            board_id=target.id,
            user_id=test_user.id,
            title="Updated Title",
            description="Updated Description",
            viewport_state={"x": 50, "y": 50, "zoom": 2.0, "rotation": 45},
        )

        assert changed is not None
        assert changed.title == "Updated Title"
        assert changed.description == "Updated Description"
        assert changed.viewport_state["zoom"] == 2.0

    def test_update_nonexistent_board(self, board_repo: BoardRepository, test_user: User):
        """Updating an unknown board id returns None."""
        missing_id = uuid4()

        outcome = board_repo.update_board(board_id=missing_id, user_id=test_user.id, title="New Title")

        assert outcome is None

    def test_update_board_wrong_owner(self, board_repo: BoardRepository, test_user: User, db: Session):
        """Non-owners cannot modify a board; the original stays intact."""
        other_user = User(email="other@example.com", password_hash="hashed")
        db.add(other_user)
        db.commit()
        db.refresh(other_user)

        target = board_repo.create_board(user_id=test_user.id, title="Test Board")

        # Attempted update by the wrong account must be rejected.
        outcome = board_repo.update_board(
            board_id=target.id, user_id=other_user.id, title="Hacked Title"
        )
        assert outcome is None

        # And the owner's copy must be untouched.
        original = board_repo.get_board_by_id(board_id=target.id, user_id=test_user.id)
        assert original.title == "Test Board"

    def test_update_board_partial_update(self, board_repo: BoardRepository, test_user: User):
        """Fields omitted from the update keep their previous values."""
        target = board_repo.create_board(
            user_id=test_user.id, title="Original Title", description="Original Description"
        )

        changed = board_repo.update_board(board_id=target.id, user_id=test_user.id, title="New Title")

        assert changed is not None
        assert changed.title == "New Title"
        assert changed.description == "Original Description"  # Should be unchanged
class TestDeleteBoard:
    """Test board deletion."""

    def test_delete_board_success(self, board_repo: BoardRepository, test_user: User):
        """Deleting an owned board reports success."""
        target = board_repo.create_board(user_id=test_user.id, title="Test Board")

        assert board_repo.delete_board(board_id=target.id, user_id=test_user.id) is True

    def test_delete_board_soft_delete(self, board_repo: BoardRepository, test_user: User, db: Session):
        """Deletion flags the row rather than removing it from the table."""
        target = board_repo.create_board(user_id=test_user.id, title="Test Board")

        board_repo.delete_board(board_id=target.id, user_id=test_user.id)

        # The row is still physically present, just flagged.
        row = db.get(Board, target.id)
        assert row is not None
        assert row.is_deleted is True

    def test_delete_board_not_in_listings(self, board_repo: BoardRepository, test_user: User):
        """Deleted boards vanish from the user's listing."""
        doomed = board_repo.create_board(user_id=test_user.id, title="Board 1")
        survivor = board_repo.create_board(user_id=test_user.id, title="Board 2")

        board_repo.delete_board(board_id=doomed.id, user_id=test_user.id)

        boards, total = board_repo.get_user_boards(user_id=test_user.id)

        assert len(boards) == 1
        assert total == 1
        assert boards[0].id == survivor.id

    def test_delete_nonexistent_board(self, board_repo: BoardRepository, test_user: User):
        """Deleting an unknown board id reports failure."""
        missing_id = uuid4()

        assert board_repo.delete_board(board_id=missing_id, user_id=test_user.id) is False

    def test_delete_board_wrong_owner(self, board_repo: BoardRepository, test_user: User, db: Session):
        """Non-owners cannot delete a board; it remains live for the owner."""
        other_user = User(email="other@example.com", password_hash="hashed")
        db.add(other_user)
        db.commit()
        db.refresh(other_user)

        target = board_repo.create_board(user_id=test_user.id, title="Test Board")

        # Deletion attempt by the wrong account must fail.
        assert board_repo.delete_board(board_id=target.id, user_id=other_user.id) is False

        # The owner still sees a live, undeleted board.
        survivor = board_repo.get_board_by_id(board_id=target.id, user_id=test_user.id)
        assert survivor is not None
        assert survivor.is_deleted is False
class TestBoardExists:
    """Tests for the board existence check, including ownership and soft delete."""

    def test_board_exists_true(self, board_repo: BoardRepository, test_user: User):
        """An owned, live board is reported as existing."""
        created = board_repo.create_board(user_id=test_user.id, title="Test Board")

        assert board_repo.board_exists(board_id=created.id, user_id=test_user.id) is True

    def test_board_exists_false(self, board_repo: BoardRepository, test_user: User):
        """An unknown board id is reported as not existing."""
        missing_id = uuid4()

        assert board_repo.board_exists(board_id=missing_id, user_id=test_user.id) is False

    def test_board_exists_wrong_owner(self, board_repo: BoardRepository, test_user: User, db: Session):
        """Existence is scoped to the owner: other users see nothing."""
        # A second account that does not own the board.
        stranger = User(email="other@example.com", password_hash="hashed")
        db.add(stranger)
        db.commit()
        db.refresh(stranger)

        # Board belongs to test_user.
        created = board_repo.create_board(user_id=test_user.id, title="Test Board")

        # Queried as the non-owner, the board must be invisible.
        assert board_repo.board_exists(board_id=created.id, user_id=stranger.id) is False

    def test_board_exists_deleted(self, board_repo: BoardRepository, test_user: User):
        """A soft-deleted board is reported as not existing."""
        created = board_repo.create_board(user_id=test_user.id, title="Test Board")

        board_repo.delete_board(board_id=created.id, user_id=test_user.id)

        assert board_repo.board_exists(board_id=created.id, user_id=test_user.id) is False
|
||||
|
||||
209
backend/tests/conftest.py
Normal file
209
backend/tests/conftest.py
Normal file
@@ -0,0 +1,209 @@
|
||||
"""Pytest configuration and fixtures for all tests."""
|
||||
|
||||
from collections.abc import Generator
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
from sqlalchemy.pool import StaticPool
|
||||
|
||||
from app.core.deps import get_db
|
||||
from app.database.base import Base
|
||||
from app.main import app
|
||||
|
||||
# Use in-memory SQLite for tests
SQLALCHEMY_DATABASE_URL = "sqlite:///:memory:"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    # SQLite forbids cross-thread use of a connection by default; the
    # TestClient may serve requests on another thread, so disable the check.
    connect_args={"check_same_thread": False},
    # StaticPool keeps one shared connection alive, so the in-memory
    # database persists across sessions instead of vanishing per connection.
    poolclass=StaticPool,
)

# Session factory bound to the test engine; the `db` fixture below builds
# per-test sessions from it.
TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
def db() -> Generator[Session, None, None]:
    """
    Yield a session against a freshly created schema, torn down after the test.

    Yields:
        Database session
    """
    # Build the full schema before the test runs.
    Base.metadata.create_all(bind=engine)

    session = TestingSessionLocal()
    try:
        yield session
    finally:
        session.close()
        # Wipe the schema so the next test starts from an empty database.
        Base.metadata.drop_all(bind=engine)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
def client(db: Session) -> Generator[TestClient, None, None]:
    """
    Yield a TestClient whose `get_db` dependency is routed to the test session.

    Args:
        db: Test database session

    Yields:
        FastAPI test client
    """

    def _yield_test_session():
        try:
            yield db
        finally:
            # The `db` fixture owns the session's lifetime; nothing to do here.
            pass

    app.dependency_overrides[get_db] = _yield_test_session

    with TestClient(app) as test_client:
        yield test_client

    # Remove the override once the test is done.
    app.dependency_overrides.clear()
|
||||
|
||||
|
||||
@pytest.fixture
def test_user_data() -> dict:
    """
    Return the default, valid credential payload used across tests.

    Returns:
        Dictionary with test user credentials
    """
    return dict(email="test@example.com", password="TestPassword123")
|
||||
|
||||
|
||||
@pytest.fixture
def test_user_data_weak_password() -> dict:
    """
    Return credentials whose password is too weak to pass validation.

    Returns:
        Dictionary with weak password
    """
    return dict(email="test@example.com", password="weak")
|
||||
|
||||
|
||||
@pytest.fixture
def test_user_data_no_uppercase() -> dict:
    """
    Return credentials whose password lacks an uppercase letter.

    Returns:
        Dictionary with invalid password
    """
    return dict(email="test@example.com", password="testpassword123")
|
||||
|
||||
|
||||
@pytest.fixture
def test_user(client: TestClient, test_user_data: dict):
    """
    Register a user through the API and return the persisted User row.

    Args:
        client: Test client (also installs the get_db override used below)
        test_user_data: User credentials

    Returns:
        User ORM object loaded from the shared test database session

    Raises:
        httpx.HTTPStatusError: If the registration request fails, so the
            failure is reported clearly instead of a KeyError on "id" below.
    """
    from app.database.models.user import User

    response = client.post("/api/v1/auth/register", json=test_user_data)
    # Fail fast with the server's error rather than an opaque KeyError.
    response.raise_for_status()
    user_id = response.json()["id"]

    # Reuse the session the app is running on (override installed by the
    # `client` fixture). `get_db` is already imported at module level, so the
    # original in-function re-import was redundant and has been dropped.
    # NOTE: next(...) yields the Session itself, not a generator.
    session = next(app.dependency_overrides[get_db]())
    return session.query(User).filter(User.id == user_id).first()
|
||||
|
||||
|
||||
@pytest.fixture
def auth_headers(client: TestClient, test_user_data: dict) -> dict:
    """
    Register and log in the default user, returning a Bearer auth header.

    Args:
        client: Test client
        test_user_data: User credentials

    Returns:
        Dictionary with Authorization header
    """
    client.post("/api/v1/auth/register", json=test_user_data)
    login = client.post("/api/v1/auth/login", json=test_user_data)
    access_token = login.json()["access_token"]
    return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@pytest.fixture
def other_user_data() -> dict:
    """
    Return credentials for a second, distinct test user.

    Returns:
        Dictionary with test user credentials
    """
    return dict(email="other@example.com", password="OtherPassword123")
|
||||
|
||||
|
||||
@pytest.fixture
def other_auth_headers(client: TestClient, other_user_data: dict) -> dict:
    """
    Register and log in the second user, returning a Bearer auth header.

    Args:
        client: Test client
        other_user_data: Other user credentials

    Returns:
        Dictionary with Authorization header
    """
    client.post("/api/v1/auth/register", json=other_user_data)
    login = client.post("/api/v1/auth/login", json=other_user_data)
    access_token = login.json()["access_token"]
    return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@pytest.fixture
def test_board(client: TestClient, auth_headers: dict):
    """
    Create a board through the API and return the persisted Board row.

    Args:
        client: Test client (also installs the get_db override used below)
        auth_headers: Authentication headers

    Returns:
        Board ORM object loaded from the shared test database session

    Raises:
        httpx.HTTPStatusError: If the creation request fails, so the failure
            is reported clearly instead of a KeyError on "id" below.
    """
    from app.database.models.board import Board

    response = client.post(
        "/api/v1/boards",
        json={"title": "Test Board", "description": "Test description"},
        headers=auth_headers,
    )
    # Fail fast with the server's error rather than an opaque KeyError.
    response.raise_for_status()
    board_id = response.json()["id"]

    # Reuse the session the app is running on (override installed by the
    # `client` fixture). `get_db` is already imported at module level, so the
    # original in-function re-import was redundant and has been dropped.
    session = next(app.dependency_overrides[get_db]())
    return session.query(Board).filter(Board.id == board_id).first()
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user