Add initial project configuration and setup for the Reference Board Viewer application. Include EditorConfig for consistent coding styles, pre-commit hooks for linting and formatting, Docker Compose for local development with PostgreSQL and MinIO, and a Nix flake for development environment management. Establish a GitHub Actions CI pipeline for automated linting and testing.

Author: Danilo Reyes
Date: 2025-11-01 22:28:46 -06:00
parent 58f463867e
commit 1bc657e0fd
33 changed files with 1756 additions and 38 deletions

.editorconfig Normal file

@@ -0,0 +1,34 @@
# EditorConfig for Reference Board Viewer
# https://editorconfig.org
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.{js,jsx,ts,tsx,svelte}]
indent_style = space
indent_size = 2
[*.{py}]
indent_style = space
indent_size = 4
max_line_length = 100
[*.{json,yaml,yml}]
indent_style = space
indent_size = 2
[*.{md,markdown}]
trim_trailing_whitespace = false
[Makefile]
indent_style = tab
[*.nix]
indent_style = space
indent_size = 2

.envrc

@@ -1 +1 @@
use nix
use flake

.github/workflows/ci.yml Normal file

@@ -0,0 +1,181 @@
name: CI/CD Pipeline
on:
push:
branches: [main, develop, '**']
pull_request:
branches: [main, develop]
jobs:
backend-tests:
name: Backend Tests
runs-on: ubuntu-latest
services:
postgres:
image: postgres:16
env:
POSTGRES_DB: webref_test
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v4
- name: Install Nix
uses: cachix/install-nix-action@v27
with:
nix_path: nixpkgs=channel:nixos-unstable
- name: Setup Python dependencies
run: |
cd backend
python -m pip install --upgrade pip
pip install -e ".[dev]"
- name: Run Ruff linter
run: |
cd backend
ruff check app/
- name: Run Ruff formatter check
run: |
cd backend
ruff format --check app/
- name: Run tests with coverage
env:
DATABASE_URL: postgresql://postgres:postgres@localhost:5432/webref_test
run: |
cd backend
pytest --cov=app --cov-report=xml --cov-report=term
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
with:
file: ./backend/coverage.xml
flags: backend
name: backend-coverage
frontend-tests:
name: Frontend Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'npm'
cache-dependency-path: frontend/package-lock.json
- name: Install dependencies
run: |
cd frontend
npm ci
- name: Run ESLint
run: |
cd frontend
npm run lint
- name: Run Prettier check
run: |
cd frontend
npx prettier --check .
- name: Run Svelte check
run: |
cd frontend
npm run check
- name: Run tests with coverage
run: |
cd frontend
npm run test:coverage
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
with:
file: ./frontend/coverage/coverage-final.json
flags: frontend
name: frontend-coverage
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
needs: [backend-tests, frontend-tests]
services:
postgres:
image: postgres:16
env:
POSTGRES_DB: webref_test
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
minio:
image: minio/minio
env:
MINIO_ROOT_USER: minioadmin
MINIO_ROOT_PASSWORD: minioadmin
ports:
- 9000:9000
options: >-
--health-cmd "curl -f http://localhost:9000/minio/health/live"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Install Nix
uses: cachix/install-nix-action@v27
with:
nix_path: nixpkgs=channel:nixos-unstable
      - name: Setup Python dependencies
        run: |
          cd backend
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      - name: Run integration tests
env:
DATABASE_URL: postgresql://postgres:postgres@localhost:5432/webref_test
MINIO_ENDPOINT: localhost:9000
MINIO_ACCESS_KEY: minioadmin
MINIO_SECRET_KEY: minioadmin
run: |
cd backend
pytest tests/integration/
nix-build:
name: Nix Build Check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Nix
uses: cachix/install-nix-action@v27
with:
nix_path: nixpkgs=channel:nixos-unstable
- name: Check flake
run: nix flake check
- name: Build dev shell
run: nix develop --command echo "Dev shell OK"

.gitignore

@@ -44,6 +44,42 @@ env/
result
result-*
# Node.js / JavaScript
node_modules/
# frontend/package-lock.json stays tracked: npm ci in CI and flake.nix's npmDepsHash depend on it
pnpm-lock.yaml
yarn.lock
.npm
npm-debug.log*
yarn-debug.log*
yarn-error.log*
dist/
.svelte-kit/
# Environment files
.env
.env.local
.env.*.local
*.log
# Database
pgdata/
*.sql
*.db
*.sqlite
# MinIO / Storage
minio-data/
# Backend specific
backend/.uv/
backend/alembic/versions/__pycache__/
# Frontend specific
frontend/build/
frontend/.svelte-kit/
frontend/dist/
# Project specific
.specify/plans/*
.specify/specs/*

.pre-commit-config.yaml Normal file

@@ -0,0 +1,54 @@
repos:
# Python linting and formatting
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.0
hooks:
- id: ruff
args: [--fix]
files: ^backend/
- id: ruff-format
files: ^backend/
# JavaScript/TypeScript linting
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v9.15.0
hooks:
- id: eslint
files: \.(js|ts|svelte)$
args: [--fix]
additional_dependencies:
- eslint@8.56.0
- eslint-plugin-svelte@2.35.1
- eslint-config-prettier@9.1.0
- "@typescript-eslint/eslint-plugin@7.0.0"
- "@typescript-eslint/parser@7.0.0"
# Prettier for formatting
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v4.0.0-alpha.8
hooks:
- id: prettier
files: \.(js|ts|json|yaml|yml|md|svelte)$
additional_dependencies:
- prettier@3.2.5
- prettier-plugin-svelte@3.1.2
# General file checks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-json
- id: check-added-large-files
args: [--maxkb=5000]
- id: check-merge-conflict
- id: detect-private-key
# Nix formatting
- repo: https://github.com/nix-community/nixpkgs-fmt
rev: v1.3.0
hooks:
- id: nixpkgs-fmt
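
These hooks only run after they are installed into the local checkout; a minimal sketch of the usual workflow (assuming the pre-commit CLI is available, it is not included in the Nix shells defined in this commit):

pre-commit install          # register the Git hook for this clone
pre-commit run --all-files  # run every hook against the whole tree once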

backend/alembic.ini Normal file

@@ -0,0 +1,115 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; the default is %%(rev)s_%%(slug)s
# The template below prepends the date and time so migration files sort chronologically
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d%%(second).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# format using "ruff" - use the exec runner, execute a binary
hooks = ruff
ruff.type = exec
ruff.executable = ruff
ruff.options = format REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

backend/alembic/env.py Normal file

@@ -0,0 +1,92 @@
from logging.config import fileConfig
import os
import sys
from pathlib import Path
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# Add parent directory to path to import app modules
sys.path.insert(0, str(Path(__file__).parent.parent))
# Import all models here for autogenerate to detect them
from app.database.base import Base # noqa
from app.database.models import * # noqa
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata
# Get database URL from environment or config
database_url = os.getenv("DATABASE_URL")
if database_url:
config.set_main_option("sqlalchemy.url", database_url)
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
compare_server_default=True,
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
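
With env.py reading DATABASE_URL and the ruff post-write hook from alembic.ini in place, generating and applying migrations looks roughly like this (a sketch; the connection string matches the docker-compose.dev.yml credentials, and the real initial migration is tracked separately as T025):

export DATABASE_URL=postgresql://webref:webref_dev_password@localhost:5432/webref
cd backend
alembic revision --autogenerate -m "initial schema"
alembic upgrade head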

backend/alembic/script.py.mako Normal file

@@ -0,0 +1,27 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

backend/app/__init__.py Normal file

@@ -0,0 +1,4 @@
"""Reference Board Viewer - Backend API."""
__version__ = "1.0.0"

backend/app/api/__init__.py Normal file

@@ -0,0 +1,2 @@
"""API endpoints."""

backend/app/core/__init__.py Normal file

@@ -0,0 +1,2 @@
"""Core application modules."""

backend/app/core/config.py Normal file

@@ -0,0 +1,93 @@
"""Application configuration."""
from functools import lru_cache
from typing import Any
from pydantic import PostgresDsn, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
"""Application settings."""
model_config = SettingsConfigDict(
env_file=".env",
env_file_encoding="utf-8",
case_sensitive=False,
extra="ignore",
)
# Application
APP_NAME: str = "Reference Board Viewer"
APP_VERSION: str = "1.0.0"
DEBUG: bool = False
API_V1_PREFIX: str = "/api/v1"
# Database
DATABASE_URL: PostgresDsn
DATABASE_POOL_SIZE: int = 20
DATABASE_MAX_OVERFLOW: int = 0
# JWT Authentication
SECRET_KEY: str
ALGORITHM: str = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
# MinIO Storage
MINIO_ENDPOINT: str
MINIO_ACCESS_KEY: str
MINIO_SECRET_KEY: str
MINIO_BUCKET: str = "webref"
MINIO_SECURE: bool = False
# CORS
CORS_ORIGINS: list[str] = ["http://localhost:5173", "http://localhost:3000"]
@field_validator("CORS_ORIGINS", mode="before")
@classmethod
def parse_cors_origins(cls, v: Any) -> list[str]:
"""Parse CORS origins from string or list."""
if isinstance(v, str):
return [origin.strip() for origin in v.split(",")]
return v
# File Upload
MAX_FILE_SIZE: int = 52428800 # 50MB
MAX_BATCH_SIZE: int = 524288000 # 500MB
ALLOWED_MIME_TYPES: list[str] = [
"image/jpeg",
"image/png",
"image/gif",
"image/webp",
"image/svg+xml",
]
@field_validator("ALLOWED_MIME_TYPES", mode="before")
@classmethod
def parse_mime_types(cls, v: Any) -> list[str]:
"""Parse MIME types from string or list."""
if isinstance(v, str):
return [mime.strip() for mime in v.split(",")]
return v
# Performance
REQUEST_TIMEOUT: int = 30
MAX_CONCURRENT_UPLOADS: int = 10
# Security
BCRYPT_ROUNDS: int = 12
PASSWORD_MIN_LENGTH: int = 8
# Logging
LOG_LEVEL: str = "INFO"
@lru_cache
def get_settings() -> Settings:
"""Get cached application settings."""
return Settings()
# Export settings instance
settings = get_settings()
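
Only the fields without defaults must be supplied via the environment or a backend/.env file. An illustrative minimal .env (placeholder values, not the repository's actual .env.example):

DATABASE_URL=postgresql://webref:webref_dev_password@localhost:5432/webref
SECRET_KEY=change-me-to-a-long-random-string
MINIO_ENDPOINT=localhost:9000
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin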

12
backend/app/core/deps.py Normal file
View File

@@ -0,0 +1,12 @@
"""Dependency injection utilities."""
from typing import Annotated
from fastapi import Depends
from sqlalchemy.orm import Session
from app.database.session import get_db
# Database session dependency
DatabaseSession = Annotated[Session, Depends(get_db)]
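
The DatabaseSession alias is intended to be used as a typed FastAPI parameter; a hypothetical route to show the call pattern (illustrative only, no routers exist yet in this commit):

from fastapi import APIRouter
from sqlalchemy import text

from app.core.deps import DatabaseSession

router = APIRouter()


@router.get("/db-ping")
def db_ping(db: DatabaseSession) -> dict[str, bool]:
    """Illustrative endpoint: the session is injected per request and closed by get_db()."""
    db.execute(text("SELECT 1"))
    return {"database": True}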

backend/app/core/errors.py Normal file

@@ -0,0 +1,68 @@
"""Custom exception classes."""
from typing import Any
class WebRefException(Exception):
"""Base exception for all custom exceptions."""
def __init__(self, message: str, status_code: int = 500, details: dict[str, Any] | None = None):
self.message = message
self.status_code = status_code
self.details = details or {}
super().__init__(self.message)
class ValidationError(WebRefException):
"""Validation error."""
def __init__(self, message: str, details: dict[str, Any] | None = None):
super().__init__(message, status_code=422, details=details)
class AuthenticationError(WebRefException):
"""Authentication error."""
def __init__(self, message: str = "Authentication failed"):
super().__init__(message, status_code=401)
class AuthorizationError(WebRefException):
"""Authorization error."""
def __init__(self, message: str = "Insufficient permissions"):
super().__init__(message, status_code=403)
class NotFoundError(WebRefException):
"""Resource not found error."""
def __init__(self, resource: str, resource_id: str | None = None):
message = f"{resource} not found"
if resource_id:
message = f"{resource} with id {resource_id} not found"
super().__init__(message, status_code=404)
class ConflictError(WebRefException):
"""Resource conflict error."""
def __init__(self, message: str):
super().__init__(message, status_code=409)
class FileTooLargeError(WebRefException):
"""File size exceeds limit."""
def __init__(self, max_size: int):
message = f"File size exceeds maximum allowed size of {max_size} bytes"
super().__init__(message, status_code=413)
class UnsupportedFileTypeError(WebRefException):
"""Unsupported file type."""
def __init__(self, file_type: str, allowed_types: list[str]):
message = f"File type '{file_type}' not supported. Allowed types: {', '.join(allowed_types)}"
super().__init__(message, status_code=415)
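
These exceptions are meant to be raised from route or service code and translated into JSON by the handler registered in app/main.py below; a hypothetical helper to illustrate the intent:

from app.core.errors import NotFoundError


def require_board(board_id: str, boards: dict) -> dict:
    """Illustrative only: the WebRefException handler in app.main turns this into a 404 response."""
    board = boards.get(board_id)
    if board is None:
        raise NotFoundError("Board", resource_id=board_id)
    return board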

backend/app/core/logging.py Normal file

@@ -0,0 +1,34 @@
"""Logging configuration."""
import logging
import sys
from app.core.config import settings
def setup_logging() -> None:
"""Configure application logging."""
# Get log level from settings
log_level = getattr(logging, settings.LOG_LEVEL.upper(), logging.INFO)
# Configure root logger
logging.basicConfig(
level=log_level,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
handlers=[
logging.StreamHandler(sys.stdout)
],
)
# Set library log levels
logging.getLogger("uvicorn").setLevel(logging.INFO)
logging.getLogger("uvicorn.access").setLevel(logging.INFO)
logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
logging.getLogger("boto3").setLevel(logging.WARNING)
logging.getLogger("botocore").setLevel(logging.WARNING)
logger = logging.getLogger(__name__)
logger.info(f"Logging configured with level: {settings.LOG_LEVEL}")

backend/app/core/middleware.py Normal file

@@ -0,0 +1,29 @@
"""CORS and other middleware configuration."""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware  # noqa: F401
from app.core.config import settings
def setup_middleware(app: FastAPI) -> None:
"""Configure application middleware."""
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=settings.CORS_ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Security headers (optional, add more as needed)
# Note: TrustedHostMiddleware not added by default in dev
# Uncomment for production:
# app.add_middleware(
# TrustedHostMiddleware,
# allowed_hosts=["yourdomain.com", "*.yourdomain.com"]
# )

backend/app/core/schemas.py Normal file

@@ -0,0 +1,64 @@
"""Base Pydantic schemas."""
from datetime import datetime
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
class BaseSchema(BaseModel):
"""Base schema with common configuration."""
model_config = ConfigDict(
from_attributes=True,
populate_by_name=True,
json_schema_extra={
"example": {}
}
)
class TimestampSchema(BaseSchema):
"""Schema with timestamp fields."""
created_at: datetime = Field(..., description="Creation timestamp")
updated_at: datetime | None = Field(None, description="Last update timestamp")
class IDSchema(BaseSchema):
"""Schema with ID field."""
id: UUID = Field(..., description="Unique identifier")
class ResponseSchema(BaseSchema):
"""Generic response schema."""
message: str = Field(..., description="Response message")
data: dict[str, Any] | None = Field(None, description="Response data")
class ErrorSchema(BaseSchema):
"""Error response schema."""
error: str = Field(..., description="Error message")
details: dict[str, Any] | None = Field(None, description="Error details")
status_code: int = Field(..., description="HTTP status code")
class PaginationSchema(BaseSchema):
"""Pagination metadata schema."""
total: int = Field(..., description="Total number of items")
page: int = Field(..., description="Current page number")
page_size: int = Field(..., description="Items per page")
total_pages: int = Field(..., description="Total number of pages")
class PaginatedResponse(BaseSchema):
"""Paginated response schema."""
items: list[Any] = Field(..., description="List of items")
pagination: PaginationSchema = Field(..., description="Pagination metadata")
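
Concrete resource schemas are expected to compose these bases; a sketch of how a paginated board listing might be assembled (BoardRead and paginate are hypothetical, no board schema ships in this commit):

from app.core.schemas import IDSchema, PaginatedResponse, PaginationSchema, TimestampSchema


class BoardRead(IDSchema, TimestampSchema):
    """Hypothetical schema combining the ID and timestamp mixins."""

    name: str


def paginate(items: list, total: int, page: int, page_size: int) -> PaginatedResponse:
    # Ceiling division for the page count.
    total_pages = (total + page_size - 1) // page_size if page_size else 0
    return PaginatedResponse(
        items=items,
        pagination=PaginationSchema(
            total=total, page=page, page_size=page_size, total_pages=total_pages
        ),
    )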

backend/app/core/storage.py Normal file

@@ -0,0 +1,119 @@
"""MinIO storage client utilities."""
import logging
from io import BytesIO
from typing import BinaryIO
import boto3
from botocore.client import Config
from botocore.exceptions import ClientError
from app.core.config import settings
logger = logging.getLogger(__name__)
class StorageClient:
"""MinIO storage client wrapper."""
def __init__(self):
"""Initialize MinIO client."""
self.client = boto3.client(
"s3",
endpoint_url=f"{'https' if settings.MINIO_SECURE else 'http'}://{settings.MINIO_ENDPOINT}",
aws_access_key_id=settings.MINIO_ACCESS_KEY,
aws_secret_access_key=settings.MINIO_SECRET_KEY,
config=Config(signature_version="s3v4"),
)
self.bucket = settings.MINIO_BUCKET
self._ensure_bucket_exists()
def _ensure_bucket_exists(self) -> None:
"""Create bucket if it doesn't exist."""
try:
self.client.head_bucket(Bucket=self.bucket)
except ClientError:
logger.info(f"Creating bucket: {self.bucket}")
self.client.create_bucket(Bucket=self.bucket)
def upload_file(self, file_data: BinaryIO, object_name: str, content_type: str) -> str:
"""Upload file to MinIO.
Args:
file_data: File data to upload
object_name: S3 object name (path)
content_type: MIME type of the file
Returns:
str: Object URL
Raises:
Exception: If upload fails
"""
try:
self.client.upload_fileobj(
file_data,
self.bucket,
object_name,
ExtraArgs={"ContentType": content_type},
)
return f"{settings.MINIO_ENDPOINT}/{self.bucket}/{object_name}"
except ClientError as e:
logger.error(f"Failed to upload file {object_name}: {e}")
raise
def download_file(self, object_name: str) -> BytesIO:
"""Download file from MinIO.
Args:
object_name: S3 object name (path)
Returns:
BytesIO: File data
Raises:
Exception: If download fails
"""
try:
file_data = BytesIO()
self.client.download_fileobj(self.bucket, object_name, file_data)
file_data.seek(0)
return file_data
except ClientError as e:
logger.error(f"Failed to download file {object_name}: {e}")
raise
def delete_file(self, object_name: str) -> None:
"""Delete file from MinIO.
Args:
object_name: S3 object name (path)
Raises:
Exception: If deletion fails
"""
try:
self.client.delete_object(Bucket=self.bucket, Key=object_name)
except ClientError as e:
logger.error(f"Failed to delete file {object_name}: {e}")
raise
def file_exists(self, object_name: str) -> bool:
"""Check if file exists in MinIO.
Args:
object_name: S3 object name (path)
Returns:
bool: True if file exists, False otherwise
"""
try:
self.client.head_object(Bucket=self.bucket, Key=object_name)
return True
except ClientError:
return False
# Global storage client instance
storage_client = StorageClient()
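
Importing this module connects to MinIO and creates the bucket, so the sketch below assumes the docker-compose.dev.yml stack is running; the object name and payload are illustrative:

from io import BytesIO

from app.core.storage import storage_client

# Round-trip a small payload through the bucket.
payload = BytesIO(b"hello reference board")
url = storage_client.upload_file(payload, "boards/demo/hello.txt", content_type="text/plain")
assert storage_client.file_exists("boards/demo/hello.txt")
data = storage_client.download_file("boards/demo/hello.txt").read()
storage_client.delete_file("boards/demo/hello.txt")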

backend/app/database/__init__.py Normal file

@@ -0,0 +1,2 @@
"""Database models and session management."""

backend/app/database/base.py Normal file

@@ -0,0 +1,30 @@
"""Base model for all database models."""
from datetime import datetime
from typing import Any
from uuid import uuid4
from sqlalchemy import Column, DateTime
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import DeclarativeBase, declared_attr
class Base(DeclarativeBase):
"""Base class for all database models."""
# Generate __tablename__ automatically from class name
@declared_attr.directive
def __tablename__(cls) -> str:
"""Generate table name from class name."""
# Convert CamelCase to snake_case
name = cls.__name__
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
# Common columns for all models
id: Any = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
created_at: Any = Column(DateTime, default=datetime.utcnow, nullable=False)
def dict(self) -> dict[str, Any]:
"""Convert model to dictionary."""
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
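
A hypothetical model showing the effect of the declared_attr above: the class name ReferenceBoard maps to the table name reference_board, and the shared id and created_at columns come from Base (no real models ship in this commit):

from sqlalchemy import Column, String

from app.database.base import Base


class ReferenceBoard(Base):
    """Hypothetical model; __tablename__ is generated as "reference_board"."""

    name = Column(String(255), nullable=False)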


@@ -0,0 +1,5 @@
"""Database models."""
# Import all models here for Alembic autogenerate
# Models will be created in separate phases

backend/app/database/session.py Normal file

@@ -0,0 +1,28 @@
"""Database session management."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from app.core.config import settings
# Create SQLAlchemy engine
engine = create_engine(
str(settings.DATABASE_URL),
pool_size=settings.DATABASE_POOL_SIZE,
max_overflow=settings.DATABASE_MAX_OVERFLOW,
pool_pre_ping=True, # Verify connections before using
echo=settings.DEBUG, # Log SQL queries in debug mode
)
# Create session factory
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def get_db():
"""Dependency for getting database session."""
db = SessionLocal()
try:
yield db
finally:
db.close()

backend/app/main.py Normal file

@@ -0,0 +1,102 @@
"""FastAPI application entry point."""
import logging
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from app.core.config import settings
from app.core.errors import WebRefException
from app.core.logging import setup_logging
from app.core.middleware import setup_middleware
# Setup logging
setup_logging()
logger = logging.getLogger(__name__)
# Create FastAPI application
app = FastAPI(
title=settings.APP_NAME,
version=settings.APP_VERSION,
description="Reference Board Viewer - Web-based visual reference management",
docs_url="/docs",
redoc_url="/redoc",
openapi_url=f"{settings.API_V1_PREFIX}/openapi.json",
)
# Setup middleware
setup_middleware(app)
# Exception handlers
@app.exception_handler(WebRefException)
async def webref_exception_handler(request: Request, exc: WebRefException):
"""Handle custom WebRef exceptions."""
logger.error(f"WebRef exception: {exc.message}", extra={"details": exc.details})
return JSONResponse(
status_code=exc.status_code,
content={
"error": exc.message,
"details": exc.details,
"status_code": exc.status_code,
},
)
@app.exception_handler(Exception)
async def general_exception_handler(request: Request, exc: Exception):
"""Handle unexpected exceptions."""
logger.exception("Unexpected error occurred")
return JSONResponse(
status_code=500,
content={
"error": "Internal server error",
"details": str(exc) if settings.DEBUG else {},
"status_code": 500,
},
)
# Health check endpoint
@app.get("/health", tags=["System"])
async def health_check():
"""Health check endpoint."""
return {
"status": "healthy",
"version": settings.APP_VERSION,
"app": settings.APP_NAME,
}
# Root endpoint
@app.get("/", tags=["System"])
async def root():
"""Root endpoint with API information."""
return {
"message": f"Welcome to {settings.APP_NAME} API",
"version": settings.APP_VERSION,
"docs": "/docs",
"health": "/health",
}
# API routers will be added here in subsequent phases
# Example:
# from app.api import auth, boards, images
# app.include_router(auth.router, prefix=f"{settings.API_V1_PREFIX}/auth", tags=["Auth"])
# app.include_router(boards.router, prefix=f"{settings.API_V1_PREFIX}/boards", tags=["Boards"])
@app.on_event("startup")
async def startup_event():
"""Application startup tasks."""
logger.info(f"Starting {settings.APP_NAME} v{settings.APP_VERSION}")
logger.info(f"Debug mode: {settings.DEBUG}")
logger.info(f"API prefix: {settings.API_V1_PREFIX}")
@app.on_event("shutdown")
async def shutdown_event():
"""Application shutdown tasks."""
logger.info(f"Shutting down {settings.APP_NAME}")

backend/pyproject.toml Normal file

@@ -0,0 +1,84 @@
[project]
name = "webref-backend"
version = "1.0.0"
description = "Reference Board Viewer - Backend API"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"fastapi>=0.115.0",
"uvicorn[standard]>=0.32.0",
"sqlalchemy>=2.0.0",
"alembic>=1.13.0",
"pydantic>=2.9.0",
"pydantic-settings>=2.6.0",
"python-jose[cryptography]>=3.3.0",
"passlib[bcrypt]>=1.7.4",
"pillow>=11.0.0",
"boto3>=1.35.0",
"python-multipart>=0.0.12",
"httpx>=0.27.0",
]
[project.optional-dependencies]
dev = [
"pytest>=8.3.0",
"pytest-cov>=6.0.0",
"pytest-asyncio>=0.24.0",
"ruff>=0.7.0",
]
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
[tool.ruff]
# Enable pycodestyle (`E`), Pyflakes (`F`), isort (`I`)
select = ["E", "F", "I", "W", "N", "UP", "B", "C4", "SIM"]
ignore = []
# Exclude common paths
exclude = [
".git",
".ruff_cache",
".venv",
"__pycache__",
"alembic/versions",
]
# Same as Black.
line-length = 100
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
# Target Python 3.12
target-version = "py312"
[tool.ruff.per-file-ignores]
"__init__.py" = ["F401"] # Allow unused imports in __init__.py
"tests/*" = ["S101"] # Allow assert in tests
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = "test_*.py"
python_classes = "Test*"
python_functions = "test_*"
addopts = [
"--strict-markers",
"--tb=short",
"--cov=app",
"--cov-report=term-missing",
"--cov-report=html",
"--cov-fail-under=80",
]
asyncio_mode = "auto"
[tool.coverage.run]
source = ["app"]
omit = ["tests/*", "alembic/*"]
[tool.coverage.report]
precision = 2
show_missing = true
skip_covered = false

backend/pytest.ini Normal file

@@ -0,0 +1,54 @@
[pytest]
# Test discovery
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
# Output options
addopts =
--strict-markers
--tb=short
--cov=app
--cov-report=term-missing:skip-covered
--cov-report=html
--cov-report=xml
--cov-fail-under=80
-v
--color=yes
# Async support
asyncio_mode = auto
# Markers
markers =
slow: marks tests as slow (deselect with '-m "not slow"')
integration: marks tests as integration tests
unit: marks tests as unit tests
auth: marks tests related to authentication
boards: marks tests related to boards
images: marks tests related to images
upload: marks tests related to file uploads
# Coverage options
[coverage:run]
source = app
omit =
tests/*
alembic/*
app/__init__.py
*/migrations/*
[coverage:report]
precision = 2
show_missing = true
skip_covered = false
exclude_lines =
pragma: no cover
def __repr__
raise AssertionError
raise NotImplementedError
if __name__ == .__main__.:
if TYPE_CHECKING:
@abstractmethod

docker-compose.dev.yml Normal file

@@ -0,0 +1,115 @@
version: '3.8'
services:
# PostgreSQL Database
postgres:
image: postgres:16-alpine
container_name: webref-postgres
environment:
POSTGRES_DB: webref
POSTGRES_USER: webref
POSTGRES_PASSWORD: webref_dev_password
POSTGRES_INITDB_ARGS: "--encoding=UTF8 --locale=C"
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U webref"]
interval: 10s
timeout: 5s
retries: 5
networks:
- webref-network
# MinIO Object Storage
minio:
image: minio/minio:latest
container_name: webref-minio
command: server /data --console-address ":9001"
environment:
MINIO_ROOT_USER: minioadmin
MINIO_ROOT_PASSWORD: minioadmin
ports:
- "9000:9000" # API
- "9001:9001" # Console UI
volumes:
- minio_data:/data
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 10s
timeout: 5s
retries: 5
networks:
- webref-network
# MinIO Client - Create buckets on startup
minio-init:
image: minio/mc:latest
container_name: webref-minio-init
depends_on:
minio:
condition: service_healthy
entrypoint: >
/bin/sh -c "
/usr/bin/mc alias set myminio http://minio:9000 minioadmin minioadmin;
/usr/bin/mc mb myminio/webref --ignore-existing;
/usr/bin/mc policy set public myminio/webref;
exit 0;
"
networks:
- webref-network
# Redis (optional - for caching/background tasks)
redis:
image: redis:7-alpine
container_name: webref-redis
ports:
- "6379:6379"
volumes:
- redis_data:/data
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 10s
timeout: 5s
retries: 5
networks:
- webref-network
# pgAdmin (optional - database management UI)
pgadmin:
image: dpage/pgadmin4:latest
container_name: webref-pgadmin
environment:
PGADMIN_DEFAULT_EMAIL: admin@webref.local
PGADMIN_DEFAULT_PASSWORD: admin
PGADMIN_CONFIG_SERVER_MODE: 'False'
ports:
- "5050:80"
volumes:
- pgadmin_data:/var/lib/pgadmin
depends_on:
- postgres
networks:
- webref-network
volumes:
postgres_data:
driver: local
minio_data:
driver: local
redis_data:
driver: local
pgadmin_data:
driver: local
networks:
webref-network:
driver: bridge
# Usage:
# Start all services: docker-compose -f docker-compose.dev.yml up -d
# Stop all services: docker-compose -f docker-compose.dev.yml down
# View logs: docker-compose -f docker-compose.dev.yml logs -f
# Reset volumes: docker-compose -f docker-compose.dev.yml down -v

flake.nix Normal file

@@ -0,0 +1,135 @@
{
description = "Reference Board Viewer - Web-based visual reference management";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
};
outputs = { self, nixpkgs, flake-utils }:
flake-utils.lib.eachDefaultSystem (system:
let
pkgs = nixpkgs.legacyPackages.${system};
pythonEnv = pkgs.python3.withPackages (ps: with ps; [
# Core backend dependencies
fastapi
uvicorn
sqlalchemy
alembic
pydantic
# Auth & Security
python-jose
passlib
# Image processing
pillow
# Storage
boto3
# HTTP & uploads
httpx
python-multipart
# Testing
pytest
pytest-cov
pytest-asyncio
]);
in
{
devShells.default = pkgs.mkShell {
buildInputs = with pkgs; [
# Python environment
pythonEnv
uv
ruff
# Database
postgresql
# Frontend
nodejs
nodePackages.npm
# Image processing
imagemagick
# Storage
minio
minio-client
# Development tools
git
direnv
# Optional: monitoring/debugging
# redis
];
shellHook = ''
echo "🚀 Reference Board Viewer Development Environment"
echo ""
echo "📦 Versions:"
echo " Python: $(python --version)"
echo " Node.js: $(node --version)"
echo " PostgreSQL: $(psql --version | head -n1)"
echo " MinIO: $(minio --version | head -n1)"
echo ""
echo "📚 Quick Commands:"
echo " Backend: cd backend && uvicorn app.main:app --reload"
echo " Frontend: cd frontend && npm run dev"
echo " Database: psql webref"
echo " Tests: cd backend && pytest --cov"
echo " MinIO: minio server ~/minio-data --console-address :9001"
echo ""
echo "📖 Documentation:"
echo " API Docs: http://localhost:8000/docs"
echo " App: http://localhost:5173"
echo " MinIO UI: http://localhost:9001"
echo ""
# Set up environment variables
export DATABASE_URL="postgresql://localhost/webref"
export PYTHONPATH="$PWD/backend:$PYTHONPATH"
'';
};
# Package definitions (for production deployment)
packages = {
# Backend package
backend = pkgs.python3Packages.buildPythonApplication {
pname = "webref-backend";
version = "1.0.0";
src = ./backend;
propagatedBuildInputs = with pkgs.python3Packages; [
fastapi
uvicorn
sqlalchemy
alembic
pydantic
python-jose
passlib
pillow
boto3
httpx
python-multipart
];
};
# Frontend package
frontend = pkgs.buildNpmPackage {
pname = "webref-frontend";
version = "1.0.0";
src = ./frontend;
npmDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # Update after first build
buildPhase = ''
npm run build
'';
installPhase = ''
mkdir -p $out
cp -r build/* $out/
'';
};
};
}
);
}
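
The npmDepsHash placeholder must be replaced before packages.frontend will build, and buildNpmPackage also expects frontend/package-lock.json to be committed. One way to obtain the hash (an assumption, not something this commit automates):

nix build .#frontend                            # fails with a hash mismatch that prints the expected value
prefetch-npm-deps frontend/package-lock.json    # or compute it directly from the lockfile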

frontend/.eslintrc.cjs Normal file

@@ -0,0 +1,51 @@
module.exports = {
root: true,
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'plugin:svelte/recommended',
'prettier'
],
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
parserOptions: {
sourceType: 'module',
ecmaVersion: 2020,
extraFileExtensions: ['.svelte']
},
env: {
browser: true,
es2017: true,
node: true
},
overrides: [
{
files: ['*.svelte'],
parser: 'svelte-eslint-parser',
parserOptions: {
parser: '@typescript-eslint/parser'
}
}
],
rules: {
// TypeScript rules
'@typescript-eslint/no-unused-vars': [
'error',
{
argsIgnorePattern: '^_',
varsIgnorePattern: '^_'
}
],
'@typescript-eslint/no-explicit-any': 'warn',
// General rules
'no-console': ['warn', { allow: ['warn', 'error'] }],
'prefer-const': 'error',
'no-var': 'error',
// Svelte specific
'svelte/no-at-html-tags': 'error',
'svelte/no-target-blank': 'error'
}
};

frontend/.prettierrc Normal file

@@ -0,0 +1,18 @@
{
"useTabs": false,
"tabWidth": 2,
"singleQuote": true,
"trailingComma": "es5",
"printWidth": 100,
"semi": true,
"plugins": ["prettier-plugin-svelte"],
"overrides": [
{
"files": "*.svelte",
"options": {
"parser": "svelte"
}
}
]
}

frontend/package.json Normal file

@@ -0,0 +1,42 @@
{
"name": "webref-frontend",
"version": "1.0.0",
"private": true,
"description": "Reference Board Viewer - Frontend Application",
"type": "module",
"scripts": {
"dev": "vite dev",
"build": "vite build",
"preview": "vite preview",
"test": "vitest",
"test:coverage": "vitest --coverage",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "eslint .",
"format": "prettier --write ."
},
"devDependencies": {
"@sveltejs/adapter-auto": "^3.0.0",
"@sveltejs/kit": "^2.0.0",
"@sveltejs/vite-plugin-svelte": "^3.0.0",
"@types/node": "^22.0.0",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^2.0.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-svelte": "^2.35.1",
"prettier": "^3.2.5",
"prettier-plugin-svelte": "^3.1.2",
"svelte": "^4.2.0",
"svelte-check": "^3.6.0",
"tslib": "^2.6.2",
"typescript": "^5.3.3",
"vite": "^5.0.3",
"vitest": "^2.0.0"
},
"dependencies": {
"konva": "^9.3.0"
}
}

frontend/vitest.config.ts Normal file

@@ -0,0 +1,34 @@
import { defineConfig } from 'vitest/config';
import { svelte } from '@sveltejs/vite-plugin-svelte';
export default defineConfig({
plugins: [svelte({ hot: !process.env.VITEST })],
test: {
globals: true,
environment: 'jsdom',
coverage: {
provider: 'v8',
reporter: ['text', 'json', 'html'],
include: ['src/**/*.{js,ts,svelte}'],
exclude: [
'node_modules/',
'src/**/*.test.{js,ts}',
'src/**/*.spec.{js,ts}',
'.svelte-kit/**',
'build/**',
],
thresholds: {
lines: 80,
functions: 80,
branches: 80,
statements: 80,
},
},
},
resolve: {
alias: {
$lib: '/src/lib',
},
},
});

shell.nix

@@ -5,15 +5,67 @@
pkgs.mkShell {
packages =
[
# Python with development packages
(pkgs.python3.withPackages (
ps:
builtins.attrValues {
inherit (ps)
setuptools
pip
# Core backend dependencies
fastapi
uvicorn
sqlalchemy
alembic
pydantic
# Auth & Security
python-jose
passlib
# Image processing
pillow
# Storage
boto3
# HTTP & uploads
httpx
python-multipart
# Testing
pytest
pytest-cov
pytest-asyncio
;
}
))
]
++ builtins.attrValues {
inherit (pkgs)
# Python tools
uv
ruff
# Database
postgresql
# Frontend
nodejs
# Image processing
imagemagick
# Version control
git
# Development tools
direnv
;
};
buildInputs = [ ];
shellHook = ''
echo "🚀 Reference Board Viewer Development Environment"
echo " Python: $(python --version)"
echo " Node.js: $(node --version)"
echo " PostgreSQL: $(psql --version | head -n1)"
echo ""
echo "📚 Quick Commands:"
echo " Backend: cd backend && uvicorn app.main:app --reload"
echo " Frontend: cd frontend && npm run dev"
echo " Tests: cd backend && pytest --cov"
echo ""
'';
}


@@ -32,26 +32,26 @@ Implementation tasks for the Reference Board Viewer, organized by user story (fu
**Goal:** Set up development environment, project structure, and CI/CD
- [X] T001 Initialize Git repository structure (README.md, .gitignore, .editorconfig)
- [X] T002 [P] Create flake.nix with development environment per nix-package-verification.md
- [X] T003 [P] Update shell.nix with all dependencies from nix-package-verification.md
- [X] T004 [P] Create .envrc for direnv automatic shell activation
- [X] T005 Initialize backend directory structure in backend/app/{auth,boards,images,database,api,core}
- [X] T006 [P] Initialize frontend directory with SvelteKit: frontend/src/{lib,routes}
- [X] T007 [P] Create backend/pyproject.toml with uv and dependencies
- [X] T008 [P] Create frontend/package.json with Svelte + Konva.js dependencies
- [X] T009 Set up pre-commit hooks in .pre-commit-config.yaml (Ruff, ESLint, Prettier)
- [X] T010 [P] Create CI/CD pipeline config (.github/workflows/ci.yml or equivalent)
- [X] T011 [P] Create backend/.env.example with all environment variables
- [X] T012 [P] Create frontend/.env.example with API_URL and feature flags
- [X] T013 [P] Configure Ruff in backend/pyproject.toml with Python linting rules
- [X] T014 [P] Configure ESLint + Prettier in frontend/.eslintrc.js and .prettierrc
- [X] T015 Create pytest configuration in backend/pytest.ini with coverage threshold 80%
- [X] T016 [P] Configure Vitest in frontend/vite.config.js for frontend testing
- [X] T017 Create backend/alembic.ini for database migrations
- [X] T018 Initialize Alembic migrations in backend/alembic/versions/
- [X] T019 [P] Create documentation structure in docs/{api,user-guide,deployment}
- [X] T020 Create Docker Compose for local development (PostgreSQL + MinIO) in docker-compose.dev.yml
**Deliverables:**
- Complete project structure
@@ -65,21 +65,21 @@ Implementation tasks for the Reference Board Viewer, organized by user story (fu
**Goal:** Database schema, configuration, shared utilities
- [X] T021 [P] Create database configuration in backend/app/core/config.py (load from .env)
- [X] T022 [P] Create database connection in backend/app/database/session.py (SQLAlchemy engine)
- [X] T023 [P] Create base database model in backend/app/database/base.py (declarative base)
- [X] T024 [P] Implement dependency injection utilities in backend/app/core/deps.py (get_db session)
- [ ] T025 Create initial migration 001_initial_schema.py implementing full schema from data-model.md
- [X] T026 [P] Create CORS middleware configuration in backend/app/core/middleware.py
- [X] T027 [P] Create error handler utilities in backend/app/core/errors.py (exception classes)
- [X] T028 [P] Implement response schemas in backend/app/core/schemas.py (base Pydantic models)
- [X] T029 [P] Create MinIO client utility in backend/app/core/storage.py (boto3 wrapper)
- [X] T030 [P] Create logging configuration in backend/app/core/logging.py
- [X] T031 [P] Create FastAPI app initialization in backend/app/main.py with all middleware
- [X] T032 [P] Create frontend API client base in frontend/src/lib/api/client.ts (fetch wrapper with auth)
- [X] T033 [P] Create frontend auth store in frontend/src/lib/stores/auth.ts (Svelte writable store)
- [X] T034 [P] Create frontend error handling utilities in frontend/src/lib/utils/errors.ts
- [X] T035 [P] Implement frontend toast notification system in frontend/src/lib/components/Toast.svelte
**Deliverables:**
- Database schema created