100 Commits

Author SHA1 Message Date
Danilo Reyes
97053901c0 mcp 2026-01-30 23:17:02 -06:00
Danilo Reyes
527fad8da0 init 2026-01-30 22:48:02 -06:00
2a418c4394 Merge pull request '001-ai-docs' (#2) from 001-ai-docs into main
Reviewed-on: #2
2026-01-30 22:20:12 -06:00
Danilo Reyes
56177eab30 constitution update 2026-01-30 22:19:06 -06:00
Danilo Reyes
d448e0f6c8 reviewing 2026-01-30 16:42:29 -06:00
Danilo Reyes
5da9abf1b7 init 2026-01-30 16:31:02 -06:00
NixOS Builder Bot
93411591ef Weekly flake update: 2026-01-30 09:47 UTC 2026-01-30 03:47:10 -06:00
Danilo Reyes
832054d987 prem2resolve
All checks were successful
Weekly NixOS Build & Cache / build-and-cache (push) Successful in 16m28s
2026-01-28 06:30:10 -06:00
NixOS Builder Bot
524859e737 Weekly flake update: 2026-01-26 10:20 UTC 2026-01-26 04:20:53 -06:00
Danilo Reyes
487fb1c294 registration secret
All checks were successful
Weekly NixOS Build & Cache / build-and-cache (push) Successful in 50m10s
2026-01-25 09:47:56 -06:00
Danilo Reyes
02b234ba6e proper syncthing id 2026-01-24 20:40:27 -06:00
Danilo Reyes
0e97d8c0c3 syncthing test friend 2026-01-24 18:53:49 -06:00
Danilo Reyes
94649363bb new vps wireguard friends 2026-01-24 18:13:26 -06:00
NixOS Builder Bot
c969158d00 Weekly flake update: 2026-01-24 00:22 UTC 2026-01-23 18:22:11 -06:00
Danilo Reyes
a45c33db1e ryot v10 2026-01-23 13:31:54 -06:00
Danilo Reyes
ef20380003 ryot upgrade to 6.0 2026-01-23 13:29:48 -06:00
Danilo Reyes
f3ae6163d0 derek configs 2026-01-23 13:29:41 -06:00
Danilo Reyes
0f65ad5217 selectively picked cudaSupport
Some checks failed
Weekly NixOS Build & Cache / build-and-cache (push) Failing after 2h0m53s
2026-01-18 17:58:33 -06:00
Danilo Reyes
4f87fec66e global cuda support 2026-01-18 17:20:33 -06:00
Danilo Reyes
8c539ca436 theme change 2026-01-18 17:20:24 -06:00
Danilo Reyes
845e027272 toggle both users for gnome 2026-01-18 17:20:11 -06:00
Danilo Reyes
1f173fa3c9 gnome extensions multi-user 2026-01-18 17:20:00 -06:00
Danilo Reyes
72d97655af blender cuda support 2026-01-17 16:58:58 -06:00
Danilo Reyes
253123c09b typo fixes uid gid 2026-01-17 16:57:18 -06:00
Danilo Reyes
20c8d082eb wip uid/gid mapping 2026-01-17 13:36:51 -06:00
Danilo Reyes
1b76039f49 parsec gone 2026-01-17 10:44:15 -06:00
Danilo Reyes
09bed16ffc antivirus 2026-01-17 06:19:57 -06:00
Danilo Reyes
d146fa68f7 not auto start sunshine 2026-01-17 06:00:00 -06:00
Danilo Reyes
c87a446a26 oops 2026-01-16 17:08:27 -06:00
Danilo Reyes
8aa95491cf parsec 2026-01-16 17:07:15 -06:00
Danilo Reyes
797963dc8b jawz scripts flake update 2026-01-16 15:30:28 -06:00
Danilo Reyes
19bc6f13d0 linting 2026-01-16 15:18:31 -06:00
Danilo Reyes
7368c914b5 mkforce 2026-01-16 15:03:07 -06:00
Danilo Reyes
ba51fce5f8 stash unprivated users 2026-01-16 14:56:51 -06:00
Danilo Reyes
25d22d0983 Update org-hugo-base-dir path in Doom Emacs configuration
Changed the org-hugo-base-dir from an absolute path to a relative path using the home directory shortcut. This adjustment improves portability of the configuration across different environments.
2026-01-16 14:07:43 -06:00
Danilo Reyes
0559d212fe Add user-specific configurations for Emacs and Stylix across multiple hosts
Updated the configuration files for miniserver, server, and workstation to include user-specific settings for Emacs and Stylix. This change enhances user management consistency and aligns with the recent refactor for multi-user support in the configuration.
2026-01-16 14:06:42 -06:00
Danilo Reyes
fa19c92b1e Update color scheme polarity for 'paul' to dark in schemes.nix
Some checks failed
Build All Color Schemes / build-schemes (push) Failing after 38m58s
2026-01-16 13:47:51 -06:00
Danilo Reyes
5e96618d8a Refactor user management in configuration files for enhanced consistency
Updated multiple configuration files to implement a unified approach for user management across applications and services. Introduced `mkEnabledWithUsers` to streamline user configuration, improving maintainability and flexibility in multi-user setups.
2026-01-16 13:47:17 -06:00
Danilo Reyes
6573392c3b Refactor user option types across modules for consistency
Updated multiple configuration files to replace the user option type with a unified `usersOptionType`, enhancing consistency in user management across applications and services. This change simplifies the user configuration process and improves maintainability.
2026-01-16 13:40:44 -06:00
Danilo Reyes
f1e6015d39 Add multi-user support for package installations across various modules
Updated multiple configuration files to include a `merge` option for user management, enhancing the ability to handle multi-user setups for applications and services. This change improves flexibility in managing user-specific package installations, ensuring a more streamlined configuration process.
2026-01-16 13:38:49 -06:00
Danilo Reyes
cceb4da041 Add custom font management and multimedia video editing support
Enhanced the configuration by introducing a custom font management system, allowing for the installation of additional fonts. Updated multimedia settings to include video editing applications, enabling users to manage multimedia and video editing packages more effectively. Removed the deprecated fonts module to streamline the configuration.
2026-01-16 13:28:18 -06:00
Danilo Reyes
cbe7c25812 Add multi-user support for various applications and services
Enhanced the configuration files to support multi-user management by introducing user options for multiple applications, including art, gaming, multimedia, and development tools. Updated existing modules to utilize these new user options, improving flexibility and maintainability in user package installations.
2026-01-16 13:07:56 -06:00
Danilo Reyes
2893487ffc format 2026-01-16 11:47:15 -06:00
Danilo Reyes
9629493d02 paul rosolie <3 2026-01-16 11:46:58 -06:00
Danilo Reyes
495f6e2e25 Refactor user management functions to use inputs
Removed the `lib.nix` file and refactored various modules to utilize `inputs.self.lib` for user package and attribute management. This change enhances consistency and maintainability across the configuration files.
2026-01-16 10:55:15 -06:00
Danilo Reyes
b89d193445 Add helper functions for multi-user toggle support
Introduced a new file `lib.nix` containing helper functions to streamline user package management and attributes for multi-user configurations. Updated various modules to utilize these functions, enhancing code maintainability and readability.
2026-01-16 10:36:02 -06:00
Danilo Reyes
4e835aca1b multi-user toggle init 2026-01-16 10:19:36 -06:00
Danilo Reyes
597f9ee5b8 new control groups 2026-01-16 09:53:02 -06:00
Danilo Reyes
d4c7ea8742 derek user 2026-01-16 09:53:02 -06:00
Danilo Reyes
f630e1483b webref migrated to module 2026-01-16 09:53:02 -06:00
NixOS Builder Bot
983e271075 Weekly flake update: 2026-01-16 10:47 UTC 2026-01-16 04:47:04 -06:00
NixOS Builder Bot
7aed2c9a65 Weekly flake update: 2026-01-10 15:39 UTC 2026-01-10 09:39:57 -06:00
Danilo Reyes
9cf2b7df40 metube update
All checks were successful
Weekly NixOS Build & Cache / build-and-cache (push) Successful in 57m27s
2026-01-06 07:51:30 -06:00
NixOS Builder Bot
2a793a3dec Weekly flake update: 2026-01-05 10:14 UTC 2026-01-05 04:14:54 -06:00
NixOS Builder Bot
89c3c0badc Weekly flake update: 2026-01-02 09:58 UTC 2026-01-02 03:58:50 -06:00
Danilo Reyes
43294b20c0 torrent port
All checks were successful
Weekly NixOS Build & Cache / build-and-cache (push) Successful in 28m19s
2026-01-01 23:45:35 -06:00
NixOS Builder Bot
4b88749d17 Weekly flake update: 2025-12-29 10:38 UTC 2025-12-29 04:38:40 -06:00
Danilo Reyes
5c53ad615e media map port
All checks were successful
Weekly NixOS Build & Cache / build-and-cache (push) Successful in 1h8m19s
2025-12-28 21:01:10 -06:00
Danilo Reyes
5fbc4e1389 Update plausible configuration to reference the correct secrets file
Some checks are pending
Weekly NixOS Build & Cache / build-and-cache (push) Has started running
2025-12-26 02:34:24 -06:00
Danilo Reyes
648a22252a plausible init 2025-12-26 02:23:04 -06:00
Danilo Reyes
467708a7e6 Refactor isso configuration to streamline server settings by removing unnecessary nesting 2025-12-25 22:07:13 -06:00
Danilo Reyes
cfb0ad1a31 Remove commented-out SMTP settings from isso configuration 2025-12-25 21:59:34 -06:00
Danilo Reyes
e80425e0f6 Re-enable isso in the service toggle list 2025-12-25 21:53:13 -06:00
Danilo Reyes
7289f6c5d2 Comment out SMTP notification settings in isso configuration 2025-12-25 21:42:57 -06:00
Danilo Reyes
54609e54bb Update isso configuration to disable SMTP notifications and comment out SMTP settings 2025-12-25 21:24:05 -06:00
Danilo Reyes
4c924f6bb4 Update encrypted secrets in env.yaml for isso, mealie, and vaultwarden configurations; adjust lastmodified timestamp and mac value. 2025-12-25 21:15:14 -06:00
Danilo Reyes
8e0c2783cd Refactor isso configuration to use structured server settings and update service dependencies 2025-12-25 21:05:32 -06:00
Danilo Reyes
2dd20fab48 isso setup 2025-12-25 19:03:14 -06:00
Danilo Reyes
07a083dfa2 system rename 2025-12-25 18:57:22 -06:00
9d8defe07b Merge pull request 'keycloak' (#1) from keycloak into main
Reviewed-on: #1
2025-12-25 18:54:49 -06:00
089ea908e3 Merge branch 'main' into keycloak 2025-12-25 18:54:36 -06:00
NixOS Builder Bot
76924a4021 Weekly flake update: 2025-12-15 10:50 UTC 2025-12-15 04:50:51 -06:00
Danilo Reyes
3325d8b931 bools to string 2025-12-10 18:45:57 -06:00
Danilo Reyes
75520f3b86 mealie configs 2025-12-10 18:38:08 -06:00
Danilo Reyes
7846f5a822 hmhmm? 2025-12-10 18:29:41 -06:00
Danilo Reyes
41850af033 uhh 2025-12-10 18:08:04 -06:00
Danilo Reyes
6cf501ab62 mealie keycloak init 2025-12-10 13:51:54 -06:00
Danilo Reyes
b00459e26e paperless signon social 2025-12-10 13:08:08 -06:00
Danilo Reyes
e279e3811f paperless > http 2025-12-10 12:46:12 -06:00
Danilo Reyes
1ade9dd65a paperless test 2025-12-10 12:09:49 -06:00
Danilo Reyes
016b181d1b disable gitea registration 2025-12-10 11:31:16 -06:00
Danilo Reyes
8c55d42ba2 Remove redirect-url from oauth2-proxy configuration to simplify callback handling 2025-12-10 05:04:03 -06:00
Danilo Reyes
b864c98786 Update oauth2-proxy configuration to use dynamic Keycloak URL and enhance redirect settings 2025-12-10 04:49:35 -06:00
Danilo Reyes
451359dc4d Add code-challenge-method to oauth2-proxy configuration for enhanced security compliance 2025-12-10 04:40:01 -06:00
Danilo Reyes
7ab8789799 Remove systemd service configuration for oauth2-proxy to streamline service management 2025-12-10 04:38:27 -06:00
Danilo Reyes
b5a5d42910 Add oauth2-proxy cookie secret to configuration and update secrets.yaml for enhanced security management 2025-12-10 04:25:47 -06:00
Danilo Reyes
8f04f99c85 Refactor oauth2-proxy configuration to change 'skip-auth-routes' to 'skip-auth-route' for improved clarity 2025-12-10 04:14:51 -06:00
Danilo Reyes
dfe8ce2e4b duh, wrong secret 2025-12-10 04:06:35 -06:00
Danilo Reyes
bd26dc247b oauth 2025-12-10 04:03:05 -06:00
Danilo Reyes
3f40666ebf Add Keycloak to the enabled services list and update its configuration to include the HTTP host setting 2025-12-10 02:51:58 -06:00
Danilo Reyes
b912aa82fa Update Keycloak configuration to ensure proper handling of SOPS secrets and maintain consistency in secret file references 2025-12-10 02:41:10 -06:00
Danilo Reyes
616db8006e Refactor Keycloak configuration to include restart units and streamline secret management 2025-12-10 02:37:55 -06:00
Danilo Reyes
ba41e8f804 Update Keycloak configuration to use new password secret and modify proxy settings 2025-12-10 02:33:31 -06:00
Danilo Reyes
5289193961 Add Keycloak to enabled services and refactor configuration settings structure 2025-12-10 02:31:31 -06:00
Danilo Reyes
e714a8d184 Update Keycloak configuration to use new secrets file and adjust environment variable references 2025-12-10 02:29:34 -06:00
Danilo Reyes
4d788d90ca linting 2025-12-10 02:29:25 -06:00
Danilo Reyes
303cd2db36 Add SOPS secrets for Keycloak database password and update configuration 2025-12-10 02:12:06 -06:00
Danilo Reyes
2cd3afe2b3 Rename Keycloak database configuration key from 'databaseName' to 'name' 2025-12-10 02:06:28 -06:00
Danilo Reyes
92492b6323 Update Keycloak database configuration to use 'databaseName' instead of 'database' 2025-12-10 02:04:17 -06:00
Danilo Reyes
6d5ae474c6 keycloak init 2025-12-10 02:00:12 -06:00
129 changed files with 5345 additions and 672 deletions

.gitea/workflows/mcp-tests.yml

@@ -0,0 +1,24 @@
name: MCP Tests
on:
  push:
    branches: [ main ]
    paths:
      - 'scripts/**'
      - 'docs/**'
      - '.gitea/workflows/mcp-tests.yml'
  pull_request:
    paths:
      - 'scripts/**'
      - 'docs/**'
      - '.gitea/workflows/mcp-tests.yml'
jobs:
  mcp-tests:
    runs-on: nixos
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Run MCP lint/format/tests via nix-shell
        run: ./scripts/mcp-server/run-tests.sh
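
The job defers entirely to a script tracked in the repository, so the same checks should be reproducible outside CI (assuming a local nix-shell, as the step name implies):

```bash
# Run the MCP lint/format/test suite locally, using the same entry point as the CI job
./scripts/mcp-server/run-tests.sh
```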

.gitignore (vendored)

@@ -2,7 +2,21 @@
config.el
*.qcow2
result
.codex/
# Prevent accidentally committing unencrypted secrets
**/secrets/*.yaml.dec
**/*-decrypted.*
**/temp-secrets.*
# Editor/OS artifacts
.DS_Store
Thumbs.db
.vscode/
.idea/
*.swp
*.tmp
__pycache__/
*.pyc
.venv/
venv/
dist/
*.egg-info/

.specify/memory/constitution.md

@@ -0,0 +1,50 @@
# [PROJECT_NAME] Constitution
<!-- Example: Spec Constitution, TaskFlow Constitution, etc. -->
## Core Principles
### [PRINCIPLE_1_NAME]
<!-- Example: I. Library-First -->
[PRINCIPLE_1_DESCRIPTION]
<!-- Example: Every feature starts as a standalone library; Libraries must be self-contained, independently testable, documented; Clear purpose required - no organizational-only libraries -->
### [PRINCIPLE_2_NAME]
<!-- Example: II. CLI Interface -->
[PRINCIPLE_2_DESCRIPTION]
<!-- Example: Every library exposes functionality via CLI; Text in/out protocol: stdin/args → stdout, errors → stderr; Support JSON + human-readable formats -->
### [PRINCIPLE_3_NAME]
<!-- Example: III. Test-First (NON-NEGOTIABLE) -->
[PRINCIPLE_3_DESCRIPTION]
<!-- Example: TDD mandatory: Tests written → User approved → Tests fail → Then implement; Red-Green-Refactor cycle strictly enforced -->
### [PRINCIPLE_4_NAME]
<!-- Example: IV. Integration Testing -->
[PRINCIPLE_4_DESCRIPTION]
<!-- Example: Focus areas requiring integration tests: New library contract tests, Contract changes, Inter-service communication, Shared schemas -->
### [PRINCIPLE_5_NAME]
<!-- Example: V. Observability, VI. Versioning & Breaking Changes, VII. Simplicity -->
[PRINCIPLE_5_DESCRIPTION]
<!-- Example: Text I/O ensures debuggability; Structured logging required; Or: MAJOR.MINOR.BUILD format; Or: Start simple, YAGNI principles -->
## [SECTION_2_NAME]
<!-- Example: Additional Constraints, Security Requirements, Performance Standards, etc. -->
[SECTION_2_CONTENT]
<!-- Example: Technology stack requirements, compliance standards, deployment policies, etc. -->
## [SECTION_3_NAME]
<!-- Example: Development Workflow, Review Process, Quality Gates, etc. -->
[SECTION_3_CONTENT]
<!-- Example: Code review requirements, testing gates, deployment approval process, etc. -->
## Governance
<!-- Example: Constitution supersedes all other practices; Amendments require documentation, approval, migration plan -->
[GOVERNANCE_RULES]
<!-- Example: All PRs/reviews must verify compliance; Complexity must be justified; Use [GUIDANCE_FILE] for runtime development guidance -->
**Version**: [CONSTITUTION_VERSION] | **Ratified**: [RATIFICATION_DATE] | **Last Amended**: [LAST_AMENDED_DATE]
<!-- Example: Version: 2.1.1 | Ratified: 2025-06-13 | Last Amended: 2025-07-16 -->

.specify/scripts/bash/check-prerequisites.sh

@@ -0,0 +1,166 @@
#!/usr/bin/env bash
# Consolidated prerequisite checking script
#
# This script provides unified prerequisite checking for the Spec-Driven Development workflow.
# It replaces the functionality previously spread across multiple scripts.
#
# Usage: ./check-prerequisites.sh [OPTIONS]
#
# OPTIONS:
# --json Output in JSON format
# --require-tasks Require tasks.md to exist (for implementation phase)
# --include-tasks Include tasks.md in AVAILABLE_DOCS list
# --paths-only Only output path variables (no validation)
# --help, -h Show help message
#
# OUTPUTS:
# JSON mode: {"FEATURE_DIR":"...", "AVAILABLE_DOCS":["..."]}
# Text mode: FEATURE_DIR:... \n AVAILABLE_DOCS: \n ✓/✗ file.md
# Paths only: REPO_ROOT: ... \n BRANCH: ... \n FEATURE_DIR: ... etc.
set -e
# Parse command line arguments
JSON_MODE=false
REQUIRE_TASKS=false
INCLUDE_TASKS=false
PATHS_ONLY=false
for arg in "$@"; do
case "$arg" in
--json)
JSON_MODE=true
;;
--require-tasks)
REQUIRE_TASKS=true
;;
--include-tasks)
INCLUDE_TASKS=true
;;
--paths-only)
PATHS_ONLY=true
;;
--help|-h)
cat << 'EOF'
Usage: check-prerequisites.sh [OPTIONS]
Consolidated prerequisite checking for the Spec-Driven Development workflow.
OPTIONS:
--json Output in JSON format
--require-tasks Require tasks.md to exist (for implementation phase)
--include-tasks Include tasks.md in AVAILABLE_DOCS list
--paths-only Only output path variables (no prerequisite validation)
--help, -h Show this help message
EXAMPLES:
# Check task prerequisites (plan.md required)
./check-prerequisites.sh --json
# Check implementation prerequisites (plan.md + tasks.md required)
./check-prerequisites.sh --json --require-tasks --include-tasks
# Get feature paths only (no validation)
./check-prerequisites.sh --paths-only
EOF
exit 0
;;
*)
echo "ERROR: Unknown option '$arg'. Use --help for usage information." >&2
exit 1
;;
esac
done
# Source common functions
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
# Get feature paths and validate branch
eval "$(get_feature_paths)"
check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
# If paths-only mode, output paths and exit (support JSON + paths-only combined)
if $PATHS_ONLY; then
if $JSON_MODE; then
# Minimal JSON paths payload (no validation performed)
printf '{"REPO_ROOT":"%s","BRANCH":"%s","FEATURE_DIR":"%s","FEATURE_SPEC":"%s","IMPL_PLAN":"%s","TASKS":"%s"}\n' \
"$REPO_ROOT" "$CURRENT_BRANCH" "$FEATURE_DIR" "$FEATURE_SPEC" "$IMPL_PLAN" "$TASKS"
else
echo "REPO_ROOT: $REPO_ROOT"
echo "BRANCH: $CURRENT_BRANCH"
echo "FEATURE_DIR: $FEATURE_DIR"
echo "FEATURE_SPEC: $FEATURE_SPEC"
echo "IMPL_PLAN: $IMPL_PLAN"
echo "TASKS: $TASKS"
fi
exit 0
fi
# Validate required directories and files
if [[ ! -d "$FEATURE_DIR" ]]; then
echo "ERROR: Feature directory not found: $FEATURE_DIR" >&2
echo "Run /speckit.specify first to create the feature structure." >&2
exit 1
fi
if [[ ! -f "$IMPL_PLAN" ]]; then
echo "ERROR: plan.md not found in $FEATURE_DIR" >&2
echo "Run /speckit.plan first to create the implementation plan." >&2
exit 1
fi
# Check for tasks.md if required
if $REQUIRE_TASKS && [[ ! -f "$TASKS" ]]; then
echo "ERROR: tasks.md not found in $FEATURE_DIR" >&2
echo "Run /speckit.tasks first to create the task list." >&2
exit 1
fi
# Build list of available documents
docs=()
# Always check these optional docs
[[ -f "$RESEARCH" ]] && docs+=("research.md")
[[ -f "$DATA_MODEL" ]] && docs+=("data-model.md")
# Check contracts directory (only if it exists and has files)
if [[ -d "$CONTRACTS_DIR" ]] && [[ -n "$(ls -A "$CONTRACTS_DIR" 2>/dev/null)" ]]; then
docs+=("contracts/")
fi
[[ -f "$QUICKSTART" ]] && docs+=("quickstart.md")
# Include tasks.md if requested and it exists
if $INCLUDE_TASKS && [[ -f "$TASKS" ]]; then
docs+=("tasks.md")
fi
# Output results
if $JSON_MODE; then
# Build JSON array of documents
if [[ ${#docs[@]} -eq 0 ]]; then
json_docs="[]"
else
json_docs=$(printf '"%s",' "${docs[@]}")
json_docs="[${json_docs%,}]"
fi
printf '{"FEATURE_DIR":"%s","AVAILABLE_DOCS":%s}\n' "$FEATURE_DIR" "$json_docs"
else
# Text output
echo "FEATURE_DIR:$FEATURE_DIR"
echo "AVAILABLE_DOCS:"
# Show status of each potential document
check_file "$RESEARCH" "research.md"
check_file "$DATA_MODEL" "data-model.md"
check_dir "$CONTRACTS_DIR" "contracts/"
check_file "$QUICKSTART" "quickstart.md"
if $INCLUDE_TASKS; then
check_file "$TASKS" "tasks.md"
fi
fi
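
A sketch of a JSON-mode run, with illustrative paths (the AVAILABLE_DOCS contents depend on which optional documents exist):

```bash
$ ./check-prerequisites.sh --json
{"FEATURE_DIR":"/repo/specs/001-ai-docs","AVAILABLE_DOCS":["research.md","quickstart.md"]}
```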

.specify/scripts/bash/common.sh (executable file)

@@ -0,0 +1,156 @@
#!/usr/bin/env bash
# Common functions and variables for all scripts
# Get repository root, with fallback for non-git repositories
get_repo_root() {
if git rev-parse --show-toplevel >/dev/null 2>&1; then
git rev-parse --show-toplevel
else
# Fall back to script location for non-git repos
local script_dir="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
(cd "$script_dir/../../.." && pwd)
fi
}
# Get current branch, with fallback for non-git repositories
get_current_branch() {
# First check if SPECIFY_FEATURE environment variable is set
if [[ -n "${SPECIFY_FEATURE:-}" ]]; then
echo "$SPECIFY_FEATURE"
return
fi
# Then check git if available
if git rev-parse --abbrev-ref HEAD >/dev/null 2>&1; then
git rev-parse --abbrev-ref HEAD
return
fi
# For non-git repos, try to find the latest feature directory
local repo_root=$(get_repo_root)
local specs_dir="$repo_root/specs"
if [[ -d "$specs_dir" ]]; then
local latest_feature=""
local highest=0
for dir in "$specs_dir"/*; do
if [[ -d "$dir" ]]; then
local dirname=$(basename "$dir")
if [[ "$dirname" =~ ^([0-9]{3})- ]]; then
local number=${BASH_REMATCH[1]}
number=$((10#$number))
if [[ "$number" -gt "$highest" ]]; then
highest=$number
latest_feature=$dirname
fi
fi
fi
done
if [[ -n "$latest_feature" ]]; then
echo "$latest_feature"
return
fi
fi
echo "main" # Final fallback
}
# Check if we have git available
has_git() {
git rev-parse --show-toplevel >/dev/null 2>&1
}
check_feature_branch() {
local branch="$1"
local has_git_repo="$2"
# For non-git repos, we can't enforce branch naming but still provide output
if [[ "$has_git_repo" != "true" ]]; then
echo "[specify] Warning: Git repository not detected; skipped branch validation" >&2
return 0
fi
if [[ ! "$branch" =~ ^[0-9]{3}- ]]; then
echo "ERROR: Not on a feature branch. Current branch: $branch" >&2
echo "Feature branches should be named like: 001-feature-name" >&2
return 1
fi
return 0
}
get_feature_dir() { echo "$1/specs/$2"; }
# Find feature directory by numeric prefix instead of exact branch match
# This allows multiple branches to work on the same spec (e.g., 004-fix-bug, 004-add-feature)
find_feature_dir_by_prefix() {
local repo_root="$1"
local branch_name="$2"
local specs_dir="$repo_root/specs"
# Extract numeric prefix from branch (e.g., "004" from "004-whatever")
if [[ ! "$branch_name" =~ ^([0-9]{3})- ]]; then
# If branch doesn't have numeric prefix, fall back to exact match
echo "$specs_dir/$branch_name"
return
fi
local prefix="${BASH_REMATCH[1]}"
# Search for directories in specs/ that start with this prefix
local matches=()
if [[ -d "$specs_dir" ]]; then
for dir in "$specs_dir"/"$prefix"-*; do
if [[ -d "$dir" ]]; then
matches+=("$(basename "$dir")")
fi
done
fi
# Handle results
if [[ ${#matches[@]} -eq 0 ]]; then
# No match found - return the branch name path (will fail later with clear error)
echo "$specs_dir/$branch_name"
elif [[ ${#matches[@]} -eq 1 ]]; then
# Exactly one match - perfect!
echo "$specs_dir/${matches[0]}"
else
# Multiple matches - this shouldn't happen with proper naming convention
echo "ERROR: Multiple spec directories found with prefix '$prefix': ${matches[*]}" >&2
echo "Please ensure only one spec directory exists per numeric prefix." >&2
echo "$specs_dir/$branch_name" # Return something to avoid breaking the script
fi
}
get_feature_paths() {
local repo_root=$(get_repo_root)
local current_branch=$(get_current_branch)
local has_git_repo="false"
if has_git; then
has_git_repo="true"
fi
# Use prefix-based lookup to support multiple branches per spec
local feature_dir=$(find_feature_dir_by_prefix "$repo_root" "$current_branch")
cat <<EOF
REPO_ROOT='$repo_root'
CURRENT_BRANCH='$current_branch'
HAS_GIT='$has_git_repo'
FEATURE_DIR='$feature_dir'
FEATURE_SPEC='$feature_dir/spec.md'
IMPL_PLAN='$feature_dir/plan.md'
TASKS='$feature_dir/tasks.md'
RESEARCH='$feature_dir/research.md'
DATA_MODEL='$feature_dir/data-model.md'
QUICKSTART='$feature_dir/quickstart.md'
CONTRACTS_DIR='$feature_dir/contracts'
EOF
}
check_file() { [[ -f "$1" ]] && echo "  ✓ $2" || echo "  ✗ $2"; }
check_dir() { [[ -d "$1" && -n $(ls -A "$1" 2>/dev/null) ]] && echo "  ✓ $2" || echo "  ✗ $2"; }
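
A minimal sketch of how the sibling scripts consume these helpers, mirroring the pattern used by check-prerequisites.sh and setup-plan.sh:

```bash
#!/usr/bin/env bash
# Source common.sh from the same directory, then import the feature paths
# that get_feature_paths emits as shell variable assignments.
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
eval "$(get_feature_paths)"
check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
echo "Feature $CURRENT_BRANCH -> $FEATURE_DIR"
```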

.specify/scripts/bash/create-new-feature.sh

@@ -0,0 +1,297 @@
#!/usr/bin/env bash
set -e
JSON_MODE=false
SHORT_NAME=""
BRANCH_NUMBER=""
ARGS=()
i=1
while [ $i -le $# ]; do
arg="${!i}"
case "$arg" in
--json)
JSON_MODE=true
;;
--short-name)
if [ $((i + 1)) -gt $# ]; then
echo 'Error: --short-name requires a value' >&2
exit 1
fi
i=$((i + 1))
next_arg="${!i}"
# Check if the next argument is another option (starts with --)
if [[ "$next_arg" == --* ]]; then
echo 'Error: --short-name requires a value' >&2
exit 1
fi
SHORT_NAME="$next_arg"
;;
--number)
if [ $((i + 1)) -gt $# ]; then
echo 'Error: --number requires a value' >&2
exit 1
fi
i=$((i + 1))
next_arg="${!i}"
if [[ "$next_arg" == --* ]]; then
echo 'Error: --number requires a value' >&2
exit 1
fi
BRANCH_NUMBER="$next_arg"
;;
--help|-h)
echo "Usage: $0 [--json] [--short-name <name>] [--number N] <feature_description>"
echo ""
echo "Options:"
echo " --json Output in JSON format"
echo " --short-name <name> Provide a custom short name (2-4 words) for the branch"
echo " --number N Specify branch number manually (overrides auto-detection)"
echo " --help, -h Show this help message"
echo ""
echo "Examples:"
echo " $0 'Add user authentication system' --short-name 'user-auth'"
echo " $0 'Implement OAuth2 integration for API' --number 5"
exit 0
;;
*)
ARGS+=("$arg")
;;
esac
i=$((i + 1))
done
FEATURE_DESCRIPTION="${ARGS[*]}"
if [ -z "$FEATURE_DESCRIPTION" ]; then
echo "Usage: $0 [--json] [--short-name <name>] [--number N] <feature_description>" >&2
exit 1
fi
# Function to find the repository root by searching for existing project markers
find_repo_root() {
local dir="$1"
while [ "$dir" != "/" ]; do
if [ -d "$dir/.git" ] || [ -d "$dir/.specify" ]; then
echo "$dir"
return 0
fi
dir="$(dirname "$dir")"
done
return 1
}
# Function to get highest number from specs directory
get_highest_from_specs() {
local specs_dir="$1"
local highest=0
if [ -d "$specs_dir" ]; then
for dir in "$specs_dir"/*; do
[ -d "$dir" ] || continue
dirname=$(basename "$dir")
number=$(echo "$dirname" | grep -o '^[0-9]\+' || echo "0")
number=$((10#$number))
if [ "$number" -gt "$highest" ]; then
highest=$number
fi
done
fi
echo "$highest"
}
# Function to get highest number from git branches
get_highest_from_branches() {
local highest=0
# Get all branches (local and remote)
branches=$(git branch -a 2>/dev/null || echo "")
if [ -n "$branches" ]; then
while IFS= read -r branch; do
# Clean branch name: remove leading markers and remote prefixes
clean_branch=$(echo "$branch" | sed 's/^[* ]*//; s|^remotes/[^/]*/||')
# Extract feature number if branch matches pattern ###-*
if echo "$clean_branch" | grep -q '^[0-9]\{3\}-'; then
number=$(echo "$clean_branch" | grep -o '^[0-9]\{3\}' || echo "0")
number=$((10#$number))
if [ "$number" -gt "$highest" ]; then
highest=$number
fi
fi
done <<< "$branches"
fi
echo "$highest"
}
# Function to check existing branches (local and remote) and return next available number
check_existing_branches() {
local specs_dir="$1"
# Fetch all remotes to get latest branch info (suppress errors if no remotes)
git fetch --all --prune 2>/dev/null || true
# Get highest number from ALL branches (not just matching short name)
local highest_branch=$(get_highest_from_branches)
# Get highest number from ALL specs (not just matching short name)
local highest_spec=$(get_highest_from_specs "$specs_dir")
# Take the maximum of both
local max_num=$highest_branch
if [ "$highest_spec" -gt "$max_num" ]; then
max_num=$highest_spec
fi
# Return next number
echo $((max_num + 1))
}
# Function to clean and format a branch name
clean_branch_name() {
local name="$1"
echo "$name" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g' | sed 's/-\+/-/g' | sed 's/^-//' | sed 's/-$//'
}
# Resolve repository root. Prefer git information when available, but fall back
# to searching for repository markers so the workflow still functions in repositories that
# were initialised with --no-git.
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if git rev-parse --show-toplevel >/dev/null 2>&1; then
REPO_ROOT=$(git rev-parse --show-toplevel)
HAS_GIT=true
else
REPO_ROOT="$(find_repo_root "$SCRIPT_DIR")"
if [ -z "$REPO_ROOT" ]; then
echo "Error: Could not determine repository root. Please run this script from within the repository." >&2
exit 1
fi
HAS_GIT=false
fi
cd "$REPO_ROOT"
SPECS_DIR="$REPO_ROOT/specs"
mkdir -p "$SPECS_DIR"
# Function to generate branch name with stop word filtering and length filtering
generate_branch_name() {
local description="$1"
# Common stop words to filter out
local stop_words="^(i|a|an|the|to|for|of|in|on|at|by|with|from|is|are|was|were|be|been|being|have|has|had|do|does|did|will|would|should|could|can|may|might|must|shall|this|that|these|those|my|your|our|their|want|need|add|get|set)$"
# Convert to lowercase and split into words
local clean_name=$(echo "$description" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/ /g')
# Filter words: remove stop words and words shorter than 3 chars (unless they're uppercase acronyms in original)
local meaningful_words=()
for word in $clean_name; do
# Skip empty words
[ -z "$word" ] && continue
# Keep words that are NOT stop words AND (length >= 3 OR are potential acronyms)
if ! echo "$word" | grep -qiE "$stop_words"; then
if [ ${#word} -ge 3 ]; then
meaningful_words+=("$word")
elif echo "$description" | grep -q "\b${word^^}\b"; then
# Keep short words if they appear as uppercase in original (likely acronyms)
meaningful_words+=("$word")
fi
fi
done
# If we have meaningful words, use first 3-4 of them
if [ ${#meaningful_words[@]} -gt 0 ]; then
local max_words=3
if [ ${#meaningful_words[@]} -eq 4 ]; then max_words=4; fi
local result=""
local count=0
for word in "${meaningful_words[@]}"; do
if [ $count -ge $max_words ]; then break; fi
if [ -n "$result" ]; then result="$result-"; fi
result="$result$word"
count=$((count + 1))
done
echo "$result"
else
# Fallback to original logic if no meaningful words found
local cleaned=$(clean_branch_name "$description")
echo "$cleaned" | tr '-' '\n' | grep -v '^$' | head -3 | tr '\n' '-' | sed 's/-$//'
fi
}
# Generate branch name
if [ -n "$SHORT_NAME" ]; then
# Use provided short name, just clean it up
BRANCH_SUFFIX=$(clean_branch_name "$SHORT_NAME")
else
# Generate from description with smart filtering
BRANCH_SUFFIX=$(generate_branch_name "$FEATURE_DESCRIPTION")
fi
# Determine branch number
if [ -z "$BRANCH_NUMBER" ]; then
if [ "$HAS_GIT" = true ]; then
# Check existing branches on remotes
BRANCH_NUMBER=$(check_existing_branches "$SPECS_DIR")
else
# Fall back to local directory check
HIGHEST=$(get_highest_from_specs "$SPECS_DIR")
BRANCH_NUMBER=$((HIGHEST + 1))
fi
fi
# Force base-10 interpretation to prevent octal conversion (e.g., 010 → 8 in octal, but should be 10 in decimal)
FEATURE_NUM=$(printf "%03d" "$((10#$BRANCH_NUMBER))")
BRANCH_NAME="${FEATURE_NUM}-${BRANCH_SUFFIX}"
# GitHub enforces a 244-byte limit on branch names
# Validate and truncate if necessary
MAX_BRANCH_LENGTH=244
if [ ${#BRANCH_NAME} -gt $MAX_BRANCH_LENGTH ]; then
# Calculate how much we need to trim from suffix
# Account for: feature number (3) + hyphen (1) = 4 chars
MAX_SUFFIX_LENGTH=$((MAX_BRANCH_LENGTH - 4))
# Truncate suffix at word boundary if possible
TRUNCATED_SUFFIX=$(echo "$BRANCH_SUFFIX" | cut -c1-$MAX_SUFFIX_LENGTH)
# Remove trailing hyphen if truncation created one
TRUNCATED_SUFFIX=$(echo "$TRUNCATED_SUFFIX" | sed 's/-$//')
ORIGINAL_BRANCH_NAME="$BRANCH_NAME"
BRANCH_NAME="${FEATURE_NUM}-${TRUNCATED_SUFFIX}"
>&2 echo "[specify] Warning: Branch name exceeded GitHub's 244-byte limit"
>&2 echo "[specify] Original: $ORIGINAL_BRANCH_NAME (${#ORIGINAL_BRANCH_NAME} bytes)"
>&2 echo "[specify] Truncated to: $BRANCH_NAME (${#BRANCH_NAME} bytes)"
fi
if [ "$HAS_GIT" = true ]; then
git checkout -b "$BRANCH_NAME"
else
>&2 echo "[specify] Warning: Git repository not detected; skipped branch creation for $BRANCH_NAME"
fi
FEATURE_DIR="$SPECS_DIR/$BRANCH_NAME"
mkdir -p "$FEATURE_DIR"
TEMPLATE="$REPO_ROOT/.specify/templates/spec-template.md"
SPEC_FILE="$FEATURE_DIR/spec.md"
if [ -f "$TEMPLATE" ]; then cp "$TEMPLATE" "$SPEC_FILE"; else touch "$SPEC_FILE"; fi
# Set the SPECIFY_FEATURE environment variable for the current session
export SPECIFY_FEATURE="$BRANCH_NAME"
if $JSON_MODE; then
printf '{"BRANCH_NAME":"%s","SPEC_FILE":"%s","FEATURE_NUM":"%s"}\n' "$BRANCH_NAME" "$SPEC_FILE" "$FEATURE_NUM"
else
echo "BRANCH_NAME: $BRANCH_NAME"
echo "SPEC_FILE: $SPEC_FILE"
echo "FEATURE_NUM: $FEATURE_NUM"
echo "SPECIFY_FEATURE environment variable set to: $BRANCH_NAME"
fi
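
A sketch of a JSON-mode run; the branch number and paths are illustrative and depend on existing branches and specs. Note that all four meaningful words in this description survive the stop-word filter ("for" is dropped), so max_words becomes 4:

```bash
$ ./create-new-feature.sh --json 'Implement OAuth2 integration for API'
Switched to a new branch '002-implement-oauth2-integration-api'
{"BRANCH_NAME":"002-implement-oauth2-integration-api","SPEC_FILE":"/repo/specs/002-implement-oauth2-integration-api/spec.md","FEATURE_NUM":"002"}
```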

.specify/scripts/bash/setup-plan.sh

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
set -e
# Parse command line arguments
JSON_MODE=false
ARGS=()
for arg in "$@"; do
case "$arg" in
--json)
JSON_MODE=true
;;
--help|-h)
echo "Usage: $0 [--json]"
echo " --json Output results in JSON format"
echo " --help Show this help message"
exit 0
;;
*)
ARGS+=("$arg")
;;
esac
done
# Get script directory and load common functions
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
# Get all paths and variables from common functions
eval "$(get_feature_paths)"
# Check if we're on a proper feature branch (only for git repos)
check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
# Ensure the feature directory exists
mkdir -p "$FEATURE_DIR"
# Copy plan template if it exists
TEMPLATE="$REPO_ROOT/.specify/templates/plan-template.md"
if [[ -f "$TEMPLATE" ]]; then
cp "$TEMPLATE" "$IMPL_PLAN"
echo "Copied plan template to $IMPL_PLAN"
else
echo "Warning: Plan template not found at $TEMPLATE"
# Create a basic plan file if template doesn't exist
touch "$IMPL_PLAN"
fi
# Output results
if $JSON_MODE; then
printf '{"FEATURE_SPEC":"%s","IMPL_PLAN":"%s","SPECS_DIR":"%s","BRANCH":"%s","HAS_GIT":"%s"}\n' \
"$FEATURE_SPEC" "$IMPL_PLAN" "$FEATURE_DIR" "$CURRENT_BRANCH" "$HAS_GIT"
else
echo "FEATURE_SPEC: $FEATURE_SPEC"
echo "IMPL_PLAN: $IMPL_PLAN"
echo "SPECS_DIR: $FEATURE_DIR"
echo "BRANCH: $CURRENT_BRANCH"
echo "HAS_GIT: $HAS_GIT"
fi
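
An illustrative JSON-mode run (paths assume the 001-ai-docs feature; note the "Copied plan template" line is echoed even in JSON mode):

```bash
$ ./setup-plan.sh --json
Copied plan template to /repo/specs/001-ai-docs/plan.md
{"FEATURE_SPEC":"/repo/specs/001-ai-docs/spec.md","IMPL_PLAN":"/repo/specs/001-ai-docs/plan.md","SPECS_DIR":"/repo/specs/001-ai-docs","BRANCH":"001-ai-docs","HAS_GIT":"true"}
```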

.specify/scripts/bash/update-agent-context.sh

@@ -0,0 +1,799 @@
#!/usr/bin/env bash
# Update agent context files with information from plan.md
#
# This script maintains AI agent context files by parsing feature specifications
# and updating agent-specific configuration files with project information.
#
# MAIN FUNCTIONS:
# 1. Environment Validation
# - Verifies git repository structure and branch information
# - Checks for required plan.md files and templates
# - Validates file permissions and accessibility
#
# 2. Plan Data Extraction
# - Parses plan.md files to extract project metadata
# - Identifies language/version, frameworks, databases, and project types
# - Handles missing or incomplete specification data gracefully
#
# 3. Agent File Management
# - Creates new agent context files from templates when needed
# - Updates existing agent files with new project information
# - Preserves manual additions and custom configurations
# - Supports multiple AI agent formats and directory structures
#
# 4. Content Generation
# - Generates language-specific build/test commands
# - Creates appropriate project directory structures
# - Updates technology stacks and recent changes sections
# - Maintains consistent formatting and timestamps
#
# 5. Multi-Agent Support
# - Handles agent-specific file paths and naming conventions
# - Supports: Claude, Gemini, Copilot, Cursor, Qwen, opencode, Codex, Windsurf, Kilo Code, Auggie CLI, Roo Code, CodeBuddy CLI, Qoder CLI, Amp, SHAI, IBM Bob, or Amazon Q Developer CLI
# - Can update single agents or all existing agent files
# - Creates default Claude file if no agent files exist
#
# Usage: ./update-agent-context.sh [agent_type]
# Agent types: claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|roo|codebuddy|amp|shai|q|bob|qoder
# Leave empty to update all existing agent files
set -e
# Enable strict error handling
set -u
set -o pipefail
#==============================================================================
# Configuration and Global Variables
#==============================================================================
# Get script directory and load common functions
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
# Get all paths and variables from common functions
eval "$(get_feature_paths)"
NEW_PLAN="$IMPL_PLAN" # Alias for compatibility with existing code
AGENT_TYPE="${1:-}"
# Agent-specific file paths
CLAUDE_FILE="$REPO_ROOT/CLAUDE.md"
GEMINI_FILE="$REPO_ROOT/GEMINI.md"
COPILOT_FILE="$REPO_ROOT/.github/agents/copilot-instructions.md"
CURSOR_FILE="$REPO_ROOT/.cursor/rules/specify-rules.mdc"
QWEN_FILE="$REPO_ROOT/QWEN.md"
AGENTS_FILE="$REPO_ROOT/AGENTS.md"
WINDSURF_FILE="$REPO_ROOT/.windsurf/rules/specify-rules.md"
KILOCODE_FILE="$REPO_ROOT/.kilocode/rules/specify-rules.md"
AUGGIE_FILE="$REPO_ROOT/.augment/rules/specify-rules.md"
ROO_FILE="$REPO_ROOT/.roo/rules/specify-rules.md"
CODEBUDDY_FILE="$REPO_ROOT/CODEBUDDY.md"
QODER_FILE="$REPO_ROOT/QODER.md"
AMP_FILE="$REPO_ROOT/AGENTS.md"
SHAI_FILE="$REPO_ROOT/SHAI.md"
Q_FILE="$REPO_ROOT/AGENTS.md"
BOB_FILE="$REPO_ROOT/AGENTS.md"
# Template file
TEMPLATE_FILE="$REPO_ROOT/.specify/templates/agent-file-template.md"
# Global variables for parsed plan data
NEW_LANG=""
NEW_FRAMEWORK=""
NEW_DB=""
NEW_PROJECT_TYPE=""
#==============================================================================
# Utility Functions
#==============================================================================
log_info() {
echo "INFO: $1"
}
log_success() {
echo "$1"
}
log_error() {
echo "ERROR: $1" >&2
}
log_warning() {
echo "WARNING: $1" >&2
}
# Cleanup function for temporary files
cleanup() {
local exit_code=$?
rm -f /tmp/agent_update_*_$$
rm -f /tmp/manual_additions_$$
exit $exit_code
}
# Set up cleanup trap
trap cleanup EXIT INT TERM
#==============================================================================
# Validation Functions
#==============================================================================
validate_environment() {
# Check if we have a current branch/feature (git or non-git)
if [[ -z "$CURRENT_BRANCH" ]]; then
log_error "Unable to determine current feature"
if [[ "$HAS_GIT" == "true" ]]; then
log_info "Make sure you're on a feature branch"
else
log_info "Set SPECIFY_FEATURE environment variable or create a feature first"
fi
exit 1
fi
# Check if plan.md exists
if [[ ! -f "$NEW_PLAN" ]]; then
log_error "No plan.md found at $NEW_PLAN"
log_info "Make sure you're working on a feature with a corresponding spec directory"
if [[ "$HAS_GIT" != "true" ]]; then
log_info "Use: export SPECIFY_FEATURE=your-feature-name or create a new feature first"
fi
exit 1
fi
# Check if template exists (needed for new files)
if [[ ! -f "$TEMPLATE_FILE" ]]; then
log_warning "Template file not found at $TEMPLATE_FILE"
log_warning "Creating new agent files will fail"
fi
}
#==============================================================================
# Plan Parsing Functions
#==============================================================================
extract_plan_field() {
local field_pattern="$1"
local plan_file="$2"
grep "^\*\*${field_pattern}\*\*: " "$plan_file" 2>/dev/null | \
head -1 | \
sed "s|^\*\*${field_pattern}\*\*: ||" | \
sed 's/^[ \t]*//;s/[ \t]*$//' | \
grep -v "NEEDS CLARIFICATION" | \
grep -v "^N/A$" || echo ""
}
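# Illustrative example: given a plan.md line such as
#   **Language/Version**: Python 3.11
# extract_plan_field "Language/Version" plan.md prints "Python 3.11";
# values still marked "NEEDS CLARIFICATION" or "N/A" come back empty.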
parse_plan_data() {
local plan_file="$1"
if [[ ! -f "$plan_file" ]]; then
log_error "Plan file not found: $plan_file"
return 1
fi
if [[ ! -r "$plan_file" ]]; then
log_error "Plan file is not readable: $plan_file"
return 1
fi
log_info "Parsing plan data from $plan_file"
NEW_LANG=$(extract_plan_field "Language/Version" "$plan_file")
NEW_FRAMEWORK=$(extract_plan_field "Primary Dependencies" "$plan_file")
NEW_DB=$(extract_plan_field "Storage" "$plan_file")
NEW_PROJECT_TYPE=$(extract_plan_field "Project Type" "$plan_file")
# Log what we found
if [[ -n "$NEW_LANG" ]]; then
log_info "Found language: $NEW_LANG"
else
log_warning "No language information found in plan"
fi
if [[ -n "$NEW_FRAMEWORK" ]]; then
log_info "Found framework: $NEW_FRAMEWORK"
fi
if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]]; then
log_info "Found database: $NEW_DB"
fi
if [[ -n "$NEW_PROJECT_TYPE" ]]; then
log_info "Found project type: $NEW_PROJECT_TYPE"
fi
}
format_technology_stack() {
local lang="$1"
local framework="$2"
local parts=()
# Add non-empty parts
[[ -n "$lang" && "$lang" != "NEEDS CLARIFICATION" ]] && parts+=("$lang")
[[ -n "$framework" && "$framework" != "NEEDS CLARIFICATION" && "$framework" != "N/A" ]] && parts+=("$framework")
# Join with proper formatting
if [[ ${#parts[@]} -eq 0 ]]; then
echo ""
elif [[ ${#parts[@]} -eq 1 ]]; then
echo "${parts[0]}"
else
# Join multiple parts with " + "
local result="${parts[0]}"
for ((i=1; i<${#parts[@]}; i++)); do
result="$result + ${parts[i]}"
done
echo "$result"
fi
}
#==============================================================================
# Template and Content Generation Functions
#==============================================================================
get_project_structure() {
local project_type="$1"
if [[ "$project_type" == *"web"* ]]; then
echo "backend/\\nfrontend/\\ntests/"
else
echo "src/\\ntests/"
fi
}
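# Note: the literal \n sequences above are placeholders; create_new_agent_file
# converts them to real newlines after template substitution.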
get_commands_for_language() {
local lang="$1"
case "$lang" in
*"Python"*)
echo "cd src && pytest && ruff check ."
;;
*"Rust"*)
echo "cargo test && cargo clippy"
;;
*"JavaScript"*|*"TypeScript"*)
echo "npm test \\&\\& npm run lint"
;;
*)
echo "# Add commands for $lang"
;;
esac
}
get_language_conventions() {
local lang="$1"
echo "$lang: Follow standard conventions"
}
create_new_agent_file() {
local target_file="$1"
local temp_file="$2"
local project_name="$3"
local current_date="$4"
if [[ ! -f "$TEMPLATE_FILE" ]]; then
log_error "Template not found at $TEMPLATE_FILE"
return 1
fi
if [[ ! -r "$TEMPLATE_FILE" ]]; then
log_error "Template file is not readable: $TEMPLATE_FILE"
return 1
fi
log_info "Creating new agent context file from template..."
if ! cp "$TEMPLATE_FILE" "$temp_file"; then
log_error "Failed to copy template file"
return 1
fi
# Replace template placeholders
local project_structure
project_structure=$(get_project_structure "$NEW_PROJECT_TYPE")
local commands
commands=$(get_commands_for_language "$NEW_LANG")
local language_conventions
language_conventions=$(get_language_conventions "$NEW_LANG")
# Perform substitutions with error checking using safer approach
# Escape special characters for sed by using a different delimiter or escaping
local escaped_lang=$(printf '%s\n' "$NEW_LANG" | sed 's/[\[\.*^$()+{}|]/\\&/g')
local escaped_framework=$(printf '%s\n' "$NEW_FRAMEWORK" | sed 's/[\[\.*^$()+{}|]/\\&/g')
local escaped_branch=$(printf '%s\n' "$CURRENT_BRANCH" | sed 's/[\[\.*^$()+{}|]/\\&/g')
# Build technology stack and recent change strings conditionally
local tech_stack
if [[ -n "$escaped_lang" && -n "$escaped_framework" ]]; then
tech_stack="- $escaped_lang + $escaped_framework ($escaped_branch)"
elif [[ -n "$escaped_lang" ]]; then
tech_stack="- $escaped_lang ($escaped_branch)"
elif [[ -n "$escaped_framework" ]]; then
tech_stack="- $escaped_framework ($escaped_branch)"
else
tech_stack="- ($escaped_branch)"
fi
local recent_change
if [[ -n "$escaped_lang" && -n "$escaped_framework" ]]; then
recent_change="- $escaped_branch: Added $escaped_lang + $escaped_framework"
elif [[ -n "$escaped_lang" ]]; then
recent_change="- $escaped_branch: Added $escaped_lang"
elif [[ -n "$escaped_framework" ]]; then
recent_change="- $escaped_branch: Added $escaped_framework"
else
recent_change="- $escaped_branch: Added"
fi
local substitutions=(
"s|\[PROJECT NAME\]|$project_name|"
"s|\[DATE\]|$current_date|"
"s|\[EXTRACTED FROM ALL PLAN.MD FILES\]|$tech_stack|"
"s|\[ACTUAL STRUCTURE FROM PLANS\]|$project_structure|g"
"s|\[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES\]|$commands|"
"s|\[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE\]|$language_conventions|"
"s|\[LAST 3 FEATURES AND WHAT THEY ADDED\]|$recent_change|"
)
for substitution in "${substitutions[@]}"; do
if ! sed -i.bak -e "$substitution" "$temp_file"; then
log_error "Failed to perform substitution: $substitution"
rm -f "$temp_file" "$temp_file.bak"
return 1
fi
done
# Convert \n sequences to actual newlines
newline=$'\n'  # $(printf '\n') would be empty: command substitution strips trailing newlines
sed -i.bak2 "s/\\\\n/\\${newline}/g" "$temp_file"
# Clean up backup files
rm -f "$temp_file.bak" "$temp_file.bak2"
return 0
}
update_existing_agent_file() {
local target_file="$1"
local current_date="$2"
log_info "Updating existing agent context file..."
# Use a single temporary file for atomic update
local temp_file
temp_file=$(mktemp) || {
log_error "Failed to create temporary file"
return 1
}
# Process the file in one pass
local tech_stack=$(format_technology_stack "$NEW_LANG" "$NEW_FRAMEWORK")
local new_tech_entries=()
local new_change_entry=""
# Prepare new technology entries
if [[ -n "$tech_stack" ]] && ! grep -q "$tech_stack" "$target_file"; then
new_tech_entries+=("- $tech_stack ($CURRENT_BRANCH)")
fi
if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]] && [[ "$NEW_DB" != "NEEDS CLARIFICATION" ]] && ! grep -q "$NEW_DB" "$target_file"; then
new_tech_entries+=("- $NEW_DB ($CURRENT_BRANCH)")
fi
# Prepare new change entry
if [[ -n "$tech_stack" ]]; then
new_change_entry="- $CURRENT_BRANCH: Added $tech_stack"
elif [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]] && [[ "$NEW_DB" != "NEEDS CLARIFICATION" ]]; then
new_change_entry="- $CURRENT_BRANCH: Added $NEW_DB"
fi
# Check if sections exist in the file
local has_active_technologies=0
local has_recent_changes=0
if grep -q "^## Active Technologies" "$target_file" 2>/dev/null; then
has_active_technologies=1
fi
if grep -q "^## Recent Changes" "$target_file" 2>/dev/null; then
has_recent_changes=1
fi
# Process file line by line
local in_tech_section=false
local in_changes_section=false
local tech_entries_added=false
local changes_entries_added=false
local existing_changes_count=0
local file_ended=false
while IFS= read -r line || [[ -n "$line" ]]; do
# Handle Active Technologies section
if [[ "$line" == "## Active Technologies" ]]; then
echo "$line" >> "$temp_file"
in_tech_section=true
continue
elif [[ $in_tech_section == true ]] && [[ "$line" =~ ^##[[:space:]] ]]; then
# Add new tech entries before closing the section
if [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
tech_entries_added=true
fi
echo "$line" >> "$temp_file"
in_tech_section=false
continue
elif [[ $in_tech_section == true ]] && [[ -z "$line" ]]; then
# Add new tech entries before empty line in tech section
if [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
tech_entries_added=true
fi
echo "$line" >> "$temp_file"
continue
fi
# Handle Recent Changes section
if [[ "$line" == "## Recent Changes" ]]; then
echo "$line" >> "$temp_file"
# Add new change entry right after the heading
if [[ -n "$new_change_entry" ]]; then
echo "$new_change_entry" >> "$temp_file"
fi
in_changes_section=true
changes_entries_added=true
continue
elif [[ $in_changes_section == true ]] && [[ "$line" =~ ^##[[:space:]] ]]; then
echo "$line" >> "$temp_file"
in_changes_section=false
continue
elif [[ $in_changes_section == true ]] && [[ "$line" == "- "* ]]; then
# Keep only first 2 existing changes
if [[ $existing_changes_count -lt 2 ]]; then
echo "$line" >> "$temp_file"
existing_changes_count=$((existing_changes_count + 1))  # avoid ((var++)) returning 1 under set -e when the count is 0
fi
continue
fi
# Update timestamp
if [[ "$line" =~ \*\*Last\ updated\*\*:.*[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] ]]; then
echo "$line" | sed "s/[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]/$current_date/" >> "$temp_file"
else
echo "$line" >> "$temp_file"
fi
done < "$target_file"
# Post-loop check: if we're still in the Active Technologies section and haven't added new entries
if [[ $in_tech_section == true ]] && [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
tech_entries_added=true
fi
# If sections don't exist, add them at the end of the file
if [[ $has_active_technologies -eq 0 ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
echo "" >> "$temp_file"
echo "## Active Technologies" >> "$temp_file"
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
tech_entries_added=true
fi
if [[ $has_recent_changes -eq 0 ]] && [[ -n "$new_change_entry" ]]; then
echo "" >> "$temp_file"
echo "## Recent Changes" >> "$temp_file"
echo "$new_change_entry" >> "$temp_file"
changes_entries_added=true
fi
# Move temp file to target atomically
if ! mv "$temp_file" "$target_file"; then
log_error "Failed to update target file"
rm -f "$temp_file"
return 1
fi
return 0
}
#==============================================================================
# Main Agent File Update Function
#==============================================================================
update_agent_file() {
local target_file="$1"
local agent_name="$2"
if [[ -z "$target_file" ]] || [[ -z "$agent_name" ]]; then
log_error "update_agent_file requires target_file and agent_name parameters"
return 1
fi
log_info "Updating $agent_name context file: $target_file"
local project_name
project_name=$(basename "$REPO_ROOT")
local current_date
current_date=$(date +%Y-%m-%d)
# Create directory if it doesn't exist
local target_dir
target_dir=$(dirname "$target_file")
if [[ ! -d "$target_dir" ]]; then
if ! mkdir -p "$target_dir"; then
log_error "Failed to create directory: $target_dir"
return 1
fi
fi
if [[ ! -f "$target_file" ]]; then
# Create new file from template
local temp_file
temp_file=$(mktemp) || {
log_error "Failed to create temporary file"
return 1
}
if create_new_agent_file "$target_file" "$temp_file" "$project_name" "$current_date"; then
if mv "$temp_file" "$target_file"; then
log_success "Created new $agent_name context file"
else
log_error "Failed to move temporary file to $target_file"
rm -f "$temp_file"
return 1
fi
else
log_error "Failed to create new agent file"
rm -f "$temp_file"
return 1
fi
else
# Update existing file
if [[ ! -r "$target_file" ]]; then
log_error "Cannot read existing file: $target_file"
return 1
fi
if [[ ! -w "$target_file" ]]; then
log_error "Cannot write to existing file: $target_file"
return 1
fi
if update_existing_agent_file "$target_file" "$current_date"; then
log_success "Updated existing $agent_name context file"
else
log_error "Failed to update existing agent file"
return 1
fi
fi
return 0
}
#==============================================================================
# Agent Selection and Processing
#==============================================================================
update_specific_agent() {
local agent_type="$1"
case "$agent_type" in
claude)
update_agent_file "$CLAUDE_FILE" "Claude Code"
;;
gemini)
update_agent_file "$GEMINI_FILE" "Gemini CLI"
;;
copilot)
update_agent_file "$COPILOT_FILE" "GitHub Copilot"
;;
cursor-agent)
update_agent_file "$CURSOR_FILE" "Cursor IDE"
;;
qwen)
update_agent_file "$QWEN_FILE" "Qwen Code"
;;
opencode)
update_agent_file "$AGENTS_FILE" "opencode"
;;
codex)
update_agent_file "$AGENTS_FILE" "Codex CLI"
;;
windsurf)
update_agent_file "$WINDSURF_FILE" "Windsurf"
;;
kilocode)
update_agent_file "$KILOCODE_FILE" "Kilo Code"
;;
auggie)
update_agent_file "$AUGGIE_FILE" "Auggie CLI"
;;
roo)
update_agent_file "$ROO_FILE" "Roo Code"
;;
codebuddy)
update_agent_file "$CODEBUDDY_FILE" "CodeBuddy CLI"
;;
qoder)
update_agent_file "$QODER_FILE" "Qoder CLI"
;;
amp)
update_agent_file "$AMP_FILE" "Amp"
;;
shai)
update_agent_file "$SHAI_FILE" "SHAI"
;;
q)
update_agent_file "$Q_FILE" "Amazon Q Developer CLI"
;;
bob)
update_agent_file "$BOB_FILE" "IBM Bob"
;;
*)
log_error "Unknown agent type '$agent_type'"
log_error "Expected: claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|roo|amp|shai|q|bob|qoder"
exit 1
;;
esac
}
update_all_existing_agents() {
local found_agent=false
# Check each possible agent file and update if it exists
if [[ -f "$CLAUDE_FILE" ]]; then
update_agent_file "$CLAUDE_FILE" "Claude Code"
found_agent=true
fi
if [[ -f "$GEMINI_FILE" ]]; then
update_agent_file "$GEMINI_FILE" "Gemini CLI"
found_agent=true
fi
if [[ -f "$COPILOT_FILE" ]]; then
update_agent_file "$COPILOT_FILE" "GitHub Copilot"
found_agent=true
fi
if [[ -f "$CURSOR_FILE" ]]; then
update_agent_file "$CURSOR_FILE" "Cursor IDE"
found_agent=true
fi
if [[ -f "$QWEN_FILE" ]]; then
update_agent_file "$QWEN_FILE" "Qwen Code"
found_agent=true
fi
if [[ -f "$AGENTS_FILE" ]]; then
update_agent_file "$AGENTS_FILE" "Codex/opencode"
found_agent=true
fi
if [[ -f "$WINDSURF_FILE" ]]; then
update_agent_file "$WINDSURF_FILE" "Windsurf"
found_agent=true
fi
if [[ -f "$KILOCODE_FILE" ]]; then
update_agent_file "$KILOCODE_FILE" "Kilo Code"
found_agent=true
fi
if [[ -f "$AUGGIE_FILE" ]]; then
update_agent_file "$AUGGIE_FILE" "Auggie CLI"
found_agent=true
fi
if [[ -f "$ROO_FILE" ]]; then
update_agent_file "$ROO_FILE" "Roo Code"
found_agent=true
fi
if [[ -f "$CODEBUDDY_FILE" ]]; then
update_agent_file "$CODEBUDDY_FILE" "CodeBuddy CLI"
found_agent=true
fi
if [[ -f "$SHAI_FILE" ]]; then
update_agent_file "$SHAI_FILE" "SHAI"
found_agent=true
fi
if [[ -f "$QODER_FILE" ]]; then
update_agent_file "$QODER_FILE" "Qoder CLI"
found_agent=true
fi
if [[ -f "$Q_FILE" ]]; then
update_agent_file "$Q_FILE" "Amazon Q Developer CLI"
found_agent=true
fi
if [[ -f "$BOB_FILE" ]]; then
update_agent_file "$BOB_FILE" "IBM Bob"
found_agent=true
fi
# If no agent files exist, create a default Claude file
if [[ "$found_agent" == false ]]; then
log_info "No existing agent files found, creating default Claude file..."
update_agent_file "$CLAUDE_FILE" "Claude Code"
fi
}
print_summary() {
echo
log_info "Summary of changes:"
if [[ -n "$NEW_LANG" ]]; then
echo " - Added language: $NEW_LANG"
fi
if [[ -n "$NEW_FRAMEWORK" ]]; then
echo " - Added framework: $NEW_FRAMEWORK"
fi
if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]]; then
echo " - Added database: $NEW_DB"
fi
echo
log_info "Usage: $0 [claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|codebuddy|shai|q|bob|qoder]"
}
#==============================================================================
# Main Execution
#==============================================================================
main() {
# Validate environment before proceeding
validate_environment
log_info "=== Updating agent context files for feature $CURRENT_BRANCH ==="
# Parse the plan file to extract project information
if ! parse_plan_data "$NEW_PLAN"; then
log_error "Failed to parse plan data"
exit 1
fi
# Process based on agent type argument
local success=true
if [[ -z "$AGENT_TYPE" ]]; then
# No specific agent provided - update all existing agent files
log_info "No agent specified, updating all existing agent files..."
if ! update_all_existing_agents; then
success=false
fi
else
# Specific agent provided - update only that agent
log_info "Updating specific agent: $AGENT_TYPE"
if ! update_specific_agent "$AGENT_TYPE"; then
success=false
fi
fi
# Print summary
print_summary
if [[ "$success" == true ]]; then
log_success "Agent context update completed successfully"
exit 0
else
log_error "Agent context update completed with errors"
exit 1
fi
}
# Execute main function if script is run directly
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
main "$@"
fi
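
Typical invocations, per the usage notes in the header:

```bash
# Update every agent context file that already exists in the repo
./update-agent-context.sh

# Refresh a single agent's file, e.g. Claude Code's CLAUDE.md
./update-agent-context.sh claude
```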

.specify/templates/agent-file-template.md

@@ -0,0 +1,28 @@
# [PROJECT NAME] Development Guidelines
Auto-generated from all feature plans. Last updated: [DATE]
## Active Technologies
[EXTRACTED FROM ALL PLAN.MD FILES]
## Project Structure
```text
[ACTUAL STRUCTURE FROM PLANS]
```
## Commands
[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES]
## Code Style
[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE]
## Recent Changes
[LAST 3 FEATURES AND WHAT THEY ADDED]
<!-- MANUAL ADDITIONS START -->
<!-- MANUAL ADDITIONS END -->

.specify/templates/checklist-template.md

@@ -0,0 +1,40 @@
# [CHECKLIST TYPE] Checklist: [FEATURE NAME]
**Purpose**: [Brief description of what this checklist covers]
**Created**: [DATE]
**Feature**: [Link to spec.md or relevant documentation]
**Note**: This checklist is generated by the `/speckit.checklist` command based on feature context and requirements.
<!--
============================================================================
IMPORTANT: The checklist items below are SAMPLE ITEMS for illustration only.
The /speckit.checklist command MUST replace these with actual items based on:
- User's specific checklist request
- Feature requirements from spec.md
- Technical context from plan.md
- Implementation details from tasks.md
DO NOT keep these sample items in the generated checklist file.
============================================================================
-->
## [Category 1]
- [ ] CHK001 First checklist item with clear action
- [ ] CHK002 Second checklist item
- [ ] CHK003 Third checklist item
## [Category 2]
- [ ] CHK004 Another category item
- [ ] CHK005 Item with specific criteria
- [ ] CHK006 Final item in this category
## Notes
- Check items off as completed: `[x]`
- Add comments or findings inline
- Link to relevant resources or documentation
- Items are numbered sequentially for easy reference


@@ -0,0 +1,104 @@
# Implementation Plan: [FEATURE]
**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
## Summary
[Extract from feature spec: primary requirement + technical approach from research]
## Technical Context
<!--
ACTION REQUIRED: Replace the content in this section with the technical details
for the project. The structure here is presented in advisory capacity to guide
the iteration process.
-->
**Language/Version**: [e.g., Python 3.11, Swift 5.9, Rust 1.75 or NEEDS CLARIFICATION]
**Primary Dependencies**: [e.g., FastAPI, UIKit, LLVM or NEEDS CLARIFICATION]
**Storage**: [if applicable, e.g., PostgreSQL, CoreData, files or N/A]
**Testing**: [e.g., pytest, XCTest, cargo test or NEEDS CLARIFICATION]
**Target Platform**: [e.g., Linux server, iOS 15+, WASM or NEEDS CLARIFICATION]
**Project Type**: [single/web/mobile - determines source structure]
**Performance Goals**: [domain-specific, e.g., 1000 req/s, 10k lines/sec, 60 fps or NEEDS CLARIFICATION]
**Constraints**: [domain-specific, e.g., <200ms p95, <100MB memory, offline-capable or NEEDS CLARIFICATION]
**Scale/Scope**: [domain-specific, e.g., 10k users, 1M LOC, 50 screens or NEEDS CLARIFICATION]
## Constitution Check
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
[Gates determined based on constitution file]
## Project Structure
### Documentation (this feature)
```text
specs/[###-feature]/
├── plan.md # This file (/speckit.plan command output)
├── research.md # Phase 0 output (/speckit.plan command)
├── data-model.md # Phase 1 output (/speckit.plan command)
├── quickstart.md # Phase 1 output (/speckit.plan command)
├── contracts/ # Phase 1 output (/speckit.plan command)
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
```
### Source Code (repository root)
<!--
ACTION REQUIRED: Replace the placeholder tree below with the concrete layout
for this feature. Delete unused options and expand the chosen structure with
real paths (e.g., apps/admin, packages/something). The delivered plan must
not include Option labels.
-->
```text
# [REMOVE IF UNUSED] Option 1: Single project (DEFAULT)
src/
├── models/
├── services/
├── cli/
└── lib/
tests/
├── contract/
├── integration/
└── unit/
# [REMOVE IF UNUSED] Option 2: Web application (when "frontend" + "backend" detected)
backend/
├── src/
│ ├── models/
│ ├── services/
│ └── api/
└── tests/
frontend/
├── src/
│ ├── components/
│ ├── pages/
│ └── services/
└── tests/
# [REMOVE IF UNUSED] Option 3: Mobile + API (when "iOS/Android" detected)
api/
└── [same as backend above]
ios/ or android/
└── [platform-specific structure: feature modules, UI flows, platform tests]
```
**Structure Decision**: [Document the selected structure and reference the real
directories captured above]
## Complexity Tracking
> **Fill ONLY if Constitution Check has violations that must be justified**
| Violation | Why Needed | Simpler Alternative Rejected Because |
|-----------|------------|-------------------------------------|
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |


@@ -0,0 +1,115 @@
# Feature Specification: [FEATURE NAME]
**Feature Branch**: `[###-feature-name]`
**Created**: [DATE]
**Status**: Draft
**Input**: User description: "$ARGUMENTS"
## User Scenarios & Testing *(mandatory)*
<!--
IMPORTANT: User stories should be PRIORITIZED as user journeys ordered by importance.
Each user story/journey must be INDEPENDENTLY TESTABLE - meaning if you implement just ONE of them,
you should still have a viable MVP (Minimum Viable Product) that delivers value.
Assign priorities (P1, P2, P3, etc.) to each story, where P1 is the most critical.
Think of each story as a standalone slice of functionality that can be:
- Developed independently
- Tested independently
- Deployed independently
- Demonstrated to users independently
-->
### User Story 1 - [Brief Title] (Priority: P1)
[Describe this user journey in plain language]
**Why this priority**: [Explain the value and why it has this priority level]
**Independent Test**: [Describe how this can be tested independently - e.g., "Can be fully tested by [specific action] and delivers [specific value]"]
**Acceptance Scenarios**:
1. **Given** [initial state], **When** [action], **Then** [expected outcome]
2. **Given** [initial state], **When** [action], **Then** [expected outcome]
---
### User Story 2 - [Brief Title] (Priority: P2)
[Describe this user journey in plain language]
**Why this priority**: [Explain the value and why it has this priority level]
**Independent Test**: [Describe how this can be tested independently]
**Acceptance Scenarios**:
1. **Given** [initial state], **When** [action], **Then** [expected outcome]
---
### User Story 3 - [Brief Title] (Priority: P3)
[Describe this user journey in plain language]
**Why this priority**: [Explain the value and why it has this priority level]
**Independent Test**: [Describe how this can be tested independently]
**Acceptance Scenarios**:
1. **Given** [initial state], **When** [action], **Then** [expected outcome]
---
[Add more user stories as needed, each with an assigned priority]
### Edge Cases
<!--
ACTION REQUIRED: The content in this section represents placeholders.
Fill them out with the right edge cases.
-->
- What happens when [boundary condition]?
- How does system handle [error scenario]?
## Requirements *(mandatory)*
<!--
ACTION REQUIRED: The content in this section represents placeholders.
Fill them out with the right functional requirements.
-->
### Functional Requirements
- **FR-001**: System MUST [specific capability, e.g., "allow users to create accounts"]
- **FR-002**: System MUST [specific capability, e.g., "validate email addresses"]
- **FR-003**: Users MUST be able to [key interaction, e.g., "reset their password"]
- **FR-004**: System MUST [data requirement, e.g., "persist user preferences"]
- **FR-005**: System MUST [behavior, e.g., "log all security events"]
*Example of marking unclear requirements:*
- **FR-006**: System MUST authenticate users via [NEEDS CLARIFICATION: auth method not specified - email/password, SSO, OAuth?]
- **FR-007**: System MUST retain user data for [NEEDS CLARIFICATION: retention period not specified]
### Key Entities *(include if feature involves data)*
- **[Entity 1]**: [What it represents, key attributes without implementation]
- **[Entity 2]**: [What it represents, relationships to other entities]
## Success Criteria *(mandatory)*
<!--
ACTION REQUIRED: Define measurable success criteria.
These must be technology-agnostic and measurable.
-->
### Measurable Outcomes
- **SC-001**: [Measurable metric, e.g., "Users can complete account creation in under 2 minutes"]
- **SC-002**: [Measurable metric, e.g., "System handles 1000 concurrent users without degradation"]
- **SC-003**: [User satisfaction metric, e.g., "90% of users successfully complete primary task on first attempt"]
- **SC-004**: [Business metric, e.g., "Reduce support tickets related to [X] by 50%"]


@@ -0,0 +1,251 @@
---
description: "Task list template for feature implementation"
---
# Tasks: [FEATURE NAME]
**Input**: Design documents from `/specs/[###-feature-name]/`
**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/
**Tests**: The examples below include test tasks. Tests are OPTIONAL - only include them if explicitly requested in the feature specification.
**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story.
## Format: `[ID] [P?] [Story] Description`
- **[P]**: Can run in parallel (different files, no dependencies)
- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3)
- Include exact file paths in descriptions
## Path Conventions
- **Single project**: `src/`, `tests/` at repository root
- **Web app**: `backend/src/`, `frontend/src/`
- **Mobile**: `api/src/`, `ios/src/` or `android/src/`
- Paths shown below assume single project - adjust based on plan.md structure
<!--
============================================================================
IMPORTANT: The tasks below are SAMPLE TASKS for illustration purposes only.
The /speckit.tasks command MUST replace these with actual tasks based on:
- User stories from spec.md (with their priorities P1, P2, P3...)
- Feature requirements from plan.md
- Entities from data-model.md
- Endpoints from contracts/
Tasks MUST be organized by user story so each story can be:
- Implemented independently
- Tested independently
- Delivered as an MVP increment
DO NOT keep these sample tasks in the generated tasks.md file.
============================================================================
-->
## Phase 1: Setup (Shared Infrastructure)
**Purpose**: Project initialization and basic structure
- [ ] T001 Create project structure per implementation plan
- [ ] T002 Initialize [language] project with [framework] dependencies
- [ ] T003 [P] Configure linting and formatting tools
---
## Phase 2: Foundational (Blocking Prerequisites)
**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented
**⚠️ CRITICAL**: No user story work can begin until this phase is complete
Examples of foundational tasks (adjust based on your project):
- [ ] T004 Setup database schema and migrations framework
- [ ] T005 [P] Implement authentication/authorization framework
- [ ] T006 [P] Setup API routing and middleware structure
- [ ] T007 Create base models/entities that all stories depend on
- [ ] T008 Configure error handling and logging infrastructure
- [ ] T009 Setup environment configuration management
**Checkpoint**: Foundation ready - user story implementation can now begin in parallel
---
## Phase 3: User Story 1 - [Title] (Priority: P1) 🎯 MVP
**Goal**: [Brief description of what this story delivers]
**Independent Test**: [How to verify this story works on its own]
### Tests for User Story 1 (OPTIONAL - only if tests requested) ⚠️
> **NOTE: Write these tests FIRST, ensure they FAIL before implementation**
- [ ] T010 [P] [US1] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T011 [P] [US1] Integration test for [user journey] in tests/integration/test_[name].py
### Implementation for User Story 1
- [ ] T012 [P] [US1] Create [Entity1] model in src/models/[entity1].py
- [ ] T013 [P] [US1] Create [Entity2] model in src/models/[entity2].py
- [ ] T014 [US1] Implement [Service] in src/services/[service].py (depends on T012, T013)
- [ ] T015 [US1] Implement [endpoint/feature] in src/[location]/[file].py
- [ ] T016 [US1] Add validation and error handling
- [ ] T017 [US1] Add logging for user story 1 operations
**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently
---
## Phase 4: User Story 2 - [Title] (Priority: P2)
**Goal**: [Brief description of what this story delivers]
**Independent Test**: [How to verify this story works on its own]
### Tests for User Story 2 (OPTIONAL - only if tests requested) ⚠️
- [ ] T018 [P] [US2] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T019 [P] [US2] Integration test for [user journey] in tests/integration/test_[name].py
### Implementation for User Story 2
- [ ] T020 [P] [US2] Create [Entity] model in src/models/[entity].py
- [ ] T021 [US2] Implement [Service] in src/services/[service].py
- [ ] T022 [US2] Implement [endpoint/feature] in src/[location]/[file].py
- [ ] T023 [US2] Integrate with User Story 1 components (if needed)
**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently
---
## Phase 5: User Story 3 - [Title] (Priority: P3)
**Goal**: [Brief description of what this story delivers]
**Independent Test**: [How to verify this story works on its own]
### Tests for User Story 3 (OPTIONAL - only if tests requested) ⚠️
- [ ] T024 [P] [US3] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T025 [P] [US3] Integration test for [user journey] in tests/integration/test_[name].py
### Implementation for User Story 3
- [ ] T026 [P] [US3] Create [Entity] model in src/models/[entity].py
- [ ] T027 [US3] Implement [Service] in src/services/[service].py
- [ ] T028 [US3] Implement [endpoint/feature] in src/[location]/[file].py
**Checkpoint**: All user stories should now be independently functional
---
[Add more user story phases as needed, following the same pattern]
---
## Phase N: Polish & Cross-Cutting Concerns
**Purpose**: Improvements that affect multiple user stories
- [ ] TXXX [P] Documentation updates in docs/
- [ ] TXXX Code cleanup and refactoring
- [ ] TXXX Performance optimization across all stories
- [ ] TXXX [P] Additional unit tests (if requested) in tests/unit/
- [ ] TXXX Security hardening
- [ ] TXXX Run quickstart.md validation
---
## Dependencies & Execution Order
### Phase Dependencies
- **Setup (Phase 1)**: No dependencies - can start immediately
- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories
- **User Stories (Phase 3+)**: All depend on Foundational phase completion
- User stories can then proceed in parallel (if staffed)
- Or sequentially in priority order (P1 → P2 → P3)
- **Polish (Final Phase)**: Depends on all desired user stories being complete
### User Story Dependencies
- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
- **User Story 2 (P2)**: Can start after Foundational (Phase 2) - May integrate with US1 but should be independently testable
- **User Story 3 (P3)**: Can start after Foundational (Phase 2) - May integrate with US1/US2 but should be independently testable
### Within Each User Story
- Tests (if included) MUST be written and FAIL before implementation
- Models before services
- Services before endpoints
- Core implementation before integration
- Story complete before moving to next priority
### Parallel Opportunities
- All Setup tasks marked [P] can run in parallel
- All Foundational tasks marked [P] can run in parallel (within Phase 2)
- Once Foundational phase completes, all user stories can start in parallel (if team capacity allows)
- All tests for a user story marked [P] can run in parallel
- Models within a story marked [P] can run in parallel
- Different user stories can be worked on in parallel by different team members
---
## Parallel Example: User Story 1
```bash
# Launch all tests for User Story 1 together (if tests requested):
Task: "Contract test for [endpoint] in tests/contract/test_[name].py"
Task: "Integration test for [user journey] in tests/integration/test_[name].py"
# Launch all models for User Story 1 together:
Task: "Create [Entity1] model in src/models/[entity1].py"
Task: "Create [Entity2] model in src/models/[entity2].py"
```
---
## Implementation Strategy
### MVP First (User Story 1 Only)
1. Complete Phase 1: Setup
2. Complete Phase 2: Foundational (CRITICAL - blocks all stories)
3. Complete Phase 3: User Story 1
4. **STOP and VALIDATE**: Test User Story 1 independently
5. Deploy/demo if ready
### Incremental Delivery
1. Complete Setup + Foundational → Foundation ready
2. Add User Story 1 → Test independently → Deploy/Demo (MVP!)
3. Add User Story 2 → Test independently → Deploy/Demo
4. Add User Story 3 → Test independently → Deploy/Demo
5. Each story adds value without breaking previous stories
### Parallel Team Strategy
With multiple developers:
1. Team completes Setup + Foundational together
2. Once Foundational is done:
- Developer A: User Story 1
- Developer B: User Story 2
- Developer C: User Story 3
3. Stories complete and integrate independently
---
## Notes
- [P] tasks = different files, no dependencies
- [Story] label maps task to specific user story for traceability
- Each user story should be independently completable and testable
- Verify tests fail before implementing
- Commit after each task or logical group
- Stop at any checkpoint to validate story independently
- Avoid: vague tasks, same file conflicts, cross-story dependencies that break independence

AGENTS.md

@@ -0,0 +1,34 @@
# NixOS Development Guidelines (AI Agents)
Auto-generated from feature plans. Last updated: 2026-01-30
## Active Technologies
- Python 3.12 + MCP server library (Python, JSON-RPC/stdin transport), click for CLI entrypoint, pytest + coverage for tests, ruff/black for lint/format (001-mcp-server)
- None (in-memory tool definitions; filesystem access for repo interactions) (001-mcp-server)
- Documentation set (AI-facing constitution and playbooks) in Markdown (001-ai-docs)
## Project Structure
```text
docs/ # Constitution and playbooks for AI guidance
specs/001-ai-docs/ # Planning artifacts (plan, research, tasks, data model, contracts)
```
## Commands
- Primary work is authoring markdown; no build/test commands required beyond manual validation.
## Code Style & Rules
- Follow repo conventions: no blank lines between code blocks; comments only when non-obvious; factor duplication into shared helpers/functions in examples.
- Constitution is authoritative for AI guidance; if human docs conflict, update both with the recorded resolution.
- Keep language business-level and technology-agnostic in AI-facing docs.
## Recent Changes
- 001-mcp-server: Added Python 3.12 + MCP server library (Python, JSON-RPC/stdin transport), click for CLI entrypoint, pytest + coverage for tests, ruff/black for lint/format
- 001-ai-docs: Documentation-focused stack; added docs/ for constitution/playbooks and specs/001-ai-docs/ for planning outputs.
<!-- MANUAL ADDITIONS START -->
<!-- MANUAL ADDITIONS END -->

TODO.md

@@ -0,0 +1,39 @@
# Keycloak SSO Rollout (Server)
## Compatible services to cover (assume up-to-date versions)
- Gitea (OAuth2/OIDC)
- Nextcloud (Social Login app)
- Paperless-ngx (OIDC)
- Mealie (OIDC v1+)
- Jellyfin (OIDC plugin)
- Kavita (OIDC-capable builds)
- Readeck (OIDC-capable builds)
- Audiobookshelf (OIDC-capable builds)
- Matrix Synapse is intentionally excluded (see below), though it supports OIDC natively if ever needed
## Explicit exclusions (no SSO for now)
- Syncplay
- Matrix/Synapse
- Arr stack (sonarr, radarr, lidarr, prowlarr, bazarr)
- qbittorrent
- sabnzbd
- metube
- multi-scrobbler
- microbin
- ryot
- maloja
- plex
- atticd
## Phased rollout plan
1) Base identity
- Add Keycloak deployment/module and realm/client defaults.
2) Gateway/proxy auth
- Add oauth2-proxy (Keycloak provider) + nginx auth_request for non-OIDC apps (e.g., homepage-dashboard, stash); see the sketch at the end of this plan.
3) Native OIDC wiring
- Configure native OIDC services (Gitea, Nextcloud, Paperless, Mealie, Jellyfin/Kavita/Readeck/Audiobookshelf) with Keycloak clients.
4) Per-service rollout
- Enable per app in priority order; document client IDs/secrets and callback URLs.
5) Verification
- Smoke-test login flows and cache any needed public keys/metadata.
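A minimal sketch of the phase 2 gateway pattern, assuming oauth2-proxy is already running on its default listen port 4180 with the Keycloak provider configured; the hostname and upstream port are placeholders, not real deployment values:
```nix
# Sketch of nginx auth_request in front of a non-OIDC app.
{
  services.nginx.virtualHosts."dashboard.example.org".locations = {
    "/" = {
      proxyPass = "http://127.0.0.1:3000"; # placeholder upstream app
      extraConfig = ''
        auth_request /oauth2/auth;
        error_page 401 = /oauth2/sign_in;
      '';
    };
    # oauth2-proxy serves /oauth2/auth and /oauth2/sign_in on its default port
    "/oauth2/".proxyPass = "http://127.0.0.1:4180";
  };
}
```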


@@ -9,7 +9,6 @@
{
imports = [
inputs.home-manager.nixosModules.home-manager
./users.nix
./jawz.nix
../modules/modules.nix
];
@@ -66,10 +65,13 @@
groups = {
users.gid = 100;
piracy.gid = 985;
core.gid = 1251;
glue.gid = 6969;
};
};
nixpkgs.config = {
allowUnfree = true;
allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [ "corefonts" ];
permittedInsecurePackages = [
"aspnetcore-runtime-wrapped-6.0.36"
"aspnetcore-runtime-6.0.36"
@@ -152,6 +154,10 @@
enable = true;
nssmdns4 = true;
};
clamav = {
daemon.enable = true;
updater.enable = true;
};
openssh = {
enable = true;
openFirewall = true;
@@ -163,6 +169,40 @@
};
};
};
fonts.fontconfig.enable = true;
fonts = {
fontconfig.enable = true;
packages =
let
customFonts = pkgs.stdenvNoCC.mkDerivation {
name = "custom-fonts";
src = inputs.fonts;
installPhase = ''
mkdir -p $out/share/fonts
find $src -type f \( \
-name "*.ttf" -o \
-name "*.otf" -o \
-name "*.woff" -o \
-name "*.woff2" \
\) -exec cp {} $out/share/fonts/ \;
'';
};
in
builtins.attrValues {
inherit customFonts;
inherit (pkgs)
symbola
comic-neue
cascadia-code
corefonts
;
inherit (pkgs.nerd-fonts)
caskaydia-cove
open-dyslexic
comic-shanns-mono
iosevka
agave
;
};
};
powerManagement.cpuFreqGovernor = lib.mkDefault "performance";
}

config/derek.nix

@@ -0,0 +1,70 @@
{
config,
lib,
pkgs,
inputs,
...
}:
let
enableForDerek = {
enable = true;
users = "bearded_dragonn";
};
in
{
my = {
stylix = enableForDerek;
emacs = enableForDerek;
apps = {
art = enableForDerek;
gaming = enableForDerek;
multimedia.videoEditing = enableForDerek;
};
dev = {
nix = enableForDerek;
python = enableForDerek;
sh = enableForDerek;
};
shell = {
exercism = enableForDerek;
tools = enableForDerek;
multimedia = enableForDerek;
};
};
sops.secrets = lib.mkIf config.my.secureHost {
derek-password.neededForUsers = true;
};
services = {
tailscale.enable = true;
open-webui.enable = lib.mkForce false;
ollama.enable = lib.mkForce false;
sunshine = {
enable = true;
autoStart = false;
capSysAdmin = true;
openFirewall = true;
};
};
users.users.bearded_dragonn = {
isNormalUser = true;
createHome = true;
hashedPasswordFile = config.sops.secrets.derek-password.path;
packages = builtins.attrValues {
inherit (pkgs)
bottles
vscode
nextcloud-client
warp
handbrake
;
inherit (inputs.prem2resolve.packages.x86_64-linux) prem2resolve;
};
extraGroups = [
"audio"
"video"
"input"
"games"
];
};
home-manager.users.bearded_dragonn.home.stateVersion = "23.05";
}


@@ -61,6 +61,8 @@ in
"scanner"
"lp"
"piracy"
"core"
"glue"
"kavita"
"video"
"docker"


@@ -10,6 +10,9 @@ in
_final: prev: {
handbrake = prev.handbrake.override { useGtk = true; };
ripgrep = prev.ripgrep.override { withPCRE2 = true; };
blender = prev.blender.override { cudaSupport = true; };
sunshine = prev.sunshine.override { cudaSupport = true; };
obs-studio = prev.obs-studio.override { cudaSupport = true; };
nautilus = prev.nautilus.overrideAttrs (old: {
buildInputs =
old.buildInputs
@@ -38,7 +41,7 @@ _final: prev: {
waybar = prev.waybar.overrideAttrs (old: {
mesonFlags = old.mesonFlags ++ [ "-Dexperimental=true" ];
});
qbittorrent = prev.qbittorrent.overrideAttrs (old: rec {
qbittorrent = prev.qbittorrent.overrideAttrs (_old: rec {
version = "5.1.3";
src = prev.fetchFromGitHub {
owner = "qbittorrent";


@@ -71,7 +71,7 @@ in
paul = mkScheme {
color = "green";
name = "valua";
polarity = "light";
polarity = "dark";
image = "${wallpapers}/paul1.jpg";
base16Scheme = {
base00 = "#1a1f16"; # dark forest floor (was deep green-black)


@@ -9,12 +9,19 @@ let
schemesFile = import ./schemes.nix {
inherit pkgs inputs;
};
scheme = schemesFile.schemes.jesus;
scheme = schemesFile.schemes.space;
cfg = config.my.stylix;
gnomeEnabled = config.services.desktopManager.gnome.enable;
in
{
options.my.stylix.enable = lib.mkEnableOption "system-wide theming with Stylix";
options.my.stylix = {
enable = lib.mkEnableOption "system-wide theming with Stylix";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.stylix;
description = "Users to apply Stylix theming for";
};
};
config = {
stylix = {
inherit (scheme) image polarity;
@@ -23,7 +30,7 @@ in
targets.qt.platform = lib.mkForce "qtct";
}
// lib.optionalAttrs (scheme ? base16Scheme) { inherit (scheme) base16Scheme; };
home-manager.users.jawz = {
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.stylix.users (user: {
gtk = lib.mkIf (!cfg.enable && gnomeEnabled) {
enable = true;
iconTheme = {
@@ -37,16 +44,16 @@ in
inherit (cfg) enable;
autoEnable = cfg.enable;
iconTheme = {
inherit (cfg) enable;
enable = true;
package = scheme.iconPackage;
light = "Papirus-Light";
dark = "Papirus-Dark";
};
targets.librewolf = {
firefoxGnomeTheme.enable = true;
profileNames = [ "jawz" ];
profileNames = [ user ];
};
};
};
});
};
}


@@ -1,13 +0,0 @@
{ ... }:
{
users.users = {
sonarr = {
uid = 274;
group = "piracy";
};
radarr = {
uid = 275;
group = "piracy";
};
};
}

docs/constitution.md

@@ -0,0 +1,64 @@
# AI Constitution for the NixOS Repository
## Scope and Audience
- Audience: AI assistants and contributors needing an authoritative description of repository rules, structure, and workflows.
- Scope: Repo-wide conventions, module categories, host roles, secrets handling, proxy rules, documentation locations, and maintenance triggers.
- Authority: This constitution is the source of truth for AI. If human-facing docs differ, update both with the recorded resolution in `specs/001-ai-docs/research.md`.
## Repository Overview
- Architecture: Flake-based repo using `flake-parts` with inputs for pkgs (stable/unstable), stylix, home-manager, sops-nix, and service overlays. Common modules are composed through `parts/core.nix` and `parts/hosts.nix`.
- Module auto-import: `modules/modules.nix` auto-imports `.nix` files under `modules/apps`, `modules/dev`, `modules/scripts`, `modules/servers`, `modules/services`, `modules/shell`, and `modules/network`, excluding `librewolf.nix`. Factories live in `modules/factories/` (`mkserver`, `mkscript`), and shared options are in `modules/nix` and `modules/users`.
- Hosts and toggles: Host definitions live in `hosts/<name>/configuration.nix` with host-specific toggles in `hosts/<name>/toggles.nix`. The `my` namespace carries toggles for apps/dev/scripts/services/shell, feature flags like `enableProxy` and `enableContainers`, and per-host `interfaces` and `ips` maps.
- Main server and proxies: `my.mainServer` selects the host that should serve traffic by default (default `miniserver`; overridden to `server` in `hosts/server/toggles.nix`). Reverse proxies use helpers in `parts/core.nix` (`proxy`, `proxyReverse`, `proxyReverseFix`, `proxyReversePrivate`) and pick IPs from `my.ips` plus the hostName/ip set by `mkserver` options.
- Secure hosts and secrets: `my.secureHost` gates SOPS secrets. Secure hosts load secrets from `secrets/*.yaml` and wireguard definitions; non-secure hosts (e.g., `hosts/emacs`) skip secret-dependent services. Default SOPS file is `secrets/secrets.yaml` via `config/base.nix`.
## Coding Conventions
- No blank lines between code blocks; keep markdown examples tight.
- Minimize comments; prefer clear naming and shared helpers (`modules/factories/mkserver.nix`, `modules/factories/mkscript.nix`) to avoid duplication.
- Use business-level, technology-agnostic language in AI docs; reserve implementation detail for module code.
- Nix structure: flatten single-child attribute sets into their full path; keep multi-child sets nested for readability; merge siblings under a shared parent; flatten the shallowest subtree first to reduce indentation without losing clarity.
```nix
config.services.jellyfin.enable = true; # preferred single-leaf form
config.services = {
nginx.enable = true;
jellyfin = {
enable = true;
port = 1234;
};
};
```
## Terminology and Naming Standards
- Module: A Nix module under `modules/<category>/<name>.nix` auto-imported into the system.
- Factory: Shared option constructors in `modules/factories/` (use `mkserver` for server modules, `mkscript` for script units).
- Options: Settings under the `my` namespace (e.g., `my.services.<service>`, `my.scripts.<script>`).
- Toggles: Enablement maps in `hosts/<name>/toggles.nix` controlling categories (apps/dev/shell/scripts/services/servers/units) and features (`enableProxy`, `enableContainers`).
- Servers: Reverse-proxied services under `modules/servers/`, normally created with `mkserver` options.
- Scripts: Units defined via `mkscript` with `enable`, `install`, `service`, `users`, `timer`, and `package` fields.
- Playbooks: Workflow guides under `docs/playbooks/` for repeatable tasks.
- Reference map: Navigation index under `docs/reference/index.md` for paths and responsibilities.
## Secrets Map and secureHost Behavior
- Secrets files: `secrets/certs.yaml`, `secrets/env.yaml`, `secrets/gallery.yaml`, `secrets/homepage.yaml`, `secrets/keys.yaml`, `secrets/wireguard.yaml`, `secrets/secrets.yaml`, plus `secrets/ssh/` for host keys.
- Placement rules: Keep secrets aligned to their file purpose (certificates → `certs.yaml`; environment/service env vars → `env.yaml`; media/gallery creds → `gallery.yaml`; homepage widgets → `homepage.yaml`; SSH/private keys → `keys.yaml`; WireGuard peers → `wireguard.yaml`; misc defaults → `secrets.yaml`).
- secureHost gating: Only hosts with `my.secureHost = true` load SOPS secrets and WireGuard interfaces. Hosts with `secureHost = false` must avoid secret-dependent services and skip SOPS entries.
## Module Categories and Active Hosts
- Module categories: apps, dev, scripts, servers, services, shell, network, users, nix, patches. Factories sit in `modules/factories/` and are imported explicitly.
- Active hosts: `workstation`, `server`, `miniserver`, `galaxy`, `emacs`. Host roles and secure status are defined in `hosts/<name>/configuration.nix` and toggles in `hosts/<name>/toggles.nix`.
## Precedence and Conflict Resolution
- Precedence: This constitution is authoritative for AI. Human docs must be updated to match. If conflicts are found, align human docs to the constitution and log the resolution in `specs/001-ai-docs/research.md`.
- Conflict handling steps: identify the divergent rule, cite the source files, decide the authoritative rule per this constitution, update both the source file and the relevant doc, and record the decision and timestamp.
## Maintenance Triggers and Update Process
- Triggers: New factory/helper, new module category, new host, new toggle set, new proxy rule, new secret category/file, change to `my.mainServer` or `my.ips`, stylix scheme changes, or new auto-import filters.
- Update flow: (1) Amend the relevant module or toggle files; (2) Update `docs/constitution.md` for rules/terminology changes; (3) Update playbooks under `docs/playbooks/` affected by the change; (4) Update `docs/reference/index.md` for navigation paths; (5) Note the decision in `specs/001-ai-docs/research.md` and refresh `quickstart.md` if discoverability shifts.
- Validation: Confirm discoverability within two clicks (constitution → reference map/playbook), secrets map completeness, and alignment with success criteria SC-001 through SC-004.
## Quick Reference and Navigation
- Constitution: `docs/constitution.md` (this file)
- Reference map: `docs/reference/index.md` (paths, hosts, secrets, proxies, stylix)
- MCP server reference: `docs/reference/mcp-server.md` (tools, invocation, sync checks)
- Playbooks: `docs/playbooks/*.md` (add module/server/script/host toggle/secret, plus template)
- Planning artifacts: `specs/001-ai-docs/` (plan, research, data-model, quickstart, contracts)


@@ -0,0 +1,18 @@
# Playbook: Add a Host Toggle
- Name: Add or adjust host toggles
- Purpose: Enable categories, services, or features per host in `hosts/<name>/toggles.nix`.
- Prerequisites: Identify host role (see Hosts and Roles), secureHost setting, and whether proxies/containers are required.
- Inputs: Toggle category (apps/dev/scripts/services/servers/units), users list, proxy/container flags, mainServer override, network interface names.
- Steps:
1. Open `hosts/<name>/toggles.nix` and adjust category maps using helper patterns (`enableList` with `mkEnabled`, `mkEnabledWithUsers`, or `mkEnabledIp`); see the sketch after this playbook.
2. Set feature flags such as `enableProxy`, `enableContainers`, and `mainServer` when the host should own proxied services.
3. Add service toggles under `servers` with proxy/ip data as needed; align IPs to `my.ips` (e.g., `mkEnabledIp` for remote hosts).
4. Ensure `interfaces` entries exist for network-facing services and match `my.interfaces` defaults unless intentionally overridden.
5. Reconcile toggle changes with secrets and secureHost: avoid enabling secret-backed services on hosts with `secureHost = false`.
- Validation:
- Toggle sets align with host capabilities and `my.secureHost`.
- Proxy- or container-dependent services have `enableProxy`/`enableContainers` enabled.
- IP/interface values match `docs/reference/index.md` entries.
- Outputs: Updated host toggle file reflecting new enablement and infrastructure flags.
- References: `docs/constitution.md` (Hosts and toggles, Main server and proxies), `docs/reference/index.md` (Hosts and Roles, Proxy rules, Network maps)
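A minimal sketch of the result, assuming a hypothetical host `myhost` and plain attribute form (the repo's `mkEnabled*` helpers are preferred where available; their exact signatures are not shown here):
```nix
# Hypothetical hosts/myhost/toggles.nix excerpt; service names are illustrative.
{
  my = {
    enableProxy = true;
    enableContainers = false;
    mainServer = "myhost"; # only if this host should own proxied services
    services.syncthing.enable = true;
    servers.gitea = {
      enable = true;
      enableProxy = true;
    };
  };
}
```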


@@ -0,0 +1,18 @@
# Playbook: Add a NixOS Module
- Name: Add a module under `modules/<category>/`
- Purpose: Introduce a new module following auto-import and toggle conventions.
- Prerequisites: Identify target host(s) and toggle category; confirm `my.secureHost` if secrets are involved.
- Inputs: Module name, category (apps/dev/scripts/servers/services/shell/network), required options, secret needs, proxy requirements if server-facing.
- Steps:
1. Choose the category path from `docs/reference/index.md` and create `modules/<category>/<name>.nix` (auto-import picks it up; avoid names filtered out such as `librewolf.nix`).
2. Define options under `my.<category>` or reuse factories (`mkserver` for servers, `mkscript` for scripts) instead of hand-rolled patterns; a skeleton is sketched after this playbook.
3. If the module needs secrets, guard references with `lib.mkIf config.my.secureHost` and map them to the correct secrets file (see secrets map).
4. For networked services, align host selection with `my.mainServer` and `my.ips`; enable reverse proxy via `enableProxy` when applicable.
5. Wire toggles for target hosts in `hosts/<host>/toggles.nix`, ensuring users/groups and containers/proxy flags are set.
- Validation:
- Module loads without extra imports (auto-import applies).
- Toggle wiring matches intended hosts; secureHost gating present for secrets.
- Proxy and port choices align with `my.mainServer`, `my.ips`, and firewall rules.
- Outputs: New module file and updated host toggles if required.
- References: `docs/constitution.md` (Module Categories, Secrets Map, Main server and proxies), `docs/reference/index.md` (Module Directories, Proxy rules, Secrets Map)
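A minimal module skeleton under these conventions, assuming a hypothetical service named `example` (replace the body with the real service options):
```nix
# Hypothetical modules/services/example.nix; auto-import loads it by path alone.
{ config, lib, ... }:
let
  cfg = config.my.services.example;
in
{
  options.my.services.example.enable = lib.mkEnableOption "example service";
  config = lib.mkIf cfg.enable {
    services.example.enable = true; # stand-in for the real service options
  };
}
```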


@@ -0,0 +1,17 @@
# Playbook: Add a Script Unit
- Name: Add a script via `mkscript`
- Purpose: Ship a script package with optional user service and timer.
- Prerequisites: Identify target users (`my.toggleUsers.scripts` defaults), secureHost status if the script needs secrets, and whether a timer/service is required.
- Inputs: Script name, package derivation, description, timer schedule, users list, service needs.
- Steps:
1. Add a definition under `my.scripts.<name>` in `modules/scripts/<name>.nix` using `mkscript` options (`enable`, `install`, `service`, `users`, `timer`, `package`, `description`); see the sketch after this playbook.
2. Ensure the package exposes the executable name used by the service/timer.
3. For user scoping, set `users` to a single user or list; defaults come from `my.toggleUsers.scripts`.
4. If secrets are required, guard references with `lib.mkIf config.my.secureHost` and map them to the appropriate secrets file.
5. Enable the script toggle in `hosts/<host>/toggles.nix` under `scripts` or `units`, and ensure timers/services are expected on that host.
- Validation:
- Script installs for intended users; systemd user service/timer activates only when `enable` and `service` are true.
- secureHost gating present for any secrets; no orphaned timers.
- Outputs: New script module and updated host toggles if needed.
- References: `docs/constitution.md` (Terminology, Secrets Map), `docs/reference/index.md` (Module Directories, Secrets Map, Hosts and Roles)
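A minimal sketch using the fields listed above, assuming a hypothetical script name and that `timer` accepts a systemd calendar string; the exact option shapes live in `modules/factories/mkscript.nix`, so treat these values as illustrative:
```nix
# Hypothetical modules/scripts/tidy-downloads.nix; all values are placeholders.
{ pkgs, ... }:
{
  my.scripts.tidy-downloads = {
    enable = true;
    install = true;
    service = true;
    users = [ "jawz" ];
    timer = "daily"; # assumption: OnCalendar-style schedule
    description = "Prune stale files from the downloads folder";
    # writeShellScriptBin exposes an executable matching the script name
    package = pkgs.writeShellScriptBin "tidy-downloads" ''
      find "$HOME/Downloads" -maxdepth 1 -type f -mtime +30 -delete
    '';
  };
}
```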


@@ -0,0 +1,17 @@
# Playbook: Add a Secret Entry
- Name: Add or update a secret
- Purpose: Place secrets in the correct SOPS file with secureHost gating.
- Prerequisites: Target host(s) must have `my.secureHost = true`; identify secret type and consumer service/module.
- Inputs: Secret name, target file (certs/env/gallery/homepage/keys/wireguard/secrets), owner/group if file material is written, consuming module path.
- Steps:
1. Choose the correct secrets file from the map in `docs/constitution.md` and add the entry there (YAML, encrypted via sops-nix).
2. If a private key or file path is required, specify `owner`, `group`, and target path consistent with the consuming module.
3. In the consuming module, reference the secret under `config.sops.secrets.<name>` and guard with `lib.mkIf config.my.secureHost` (sketched after this playbook).
4. For WireGuard entries, update `secrets/wireguard.yaml` and corresponding interface configuration under the target host.
5. Avoid adding secrets for hosts with `secureHost = false`; instead route the workload to a secure host or skip enablement.
- Validation:
- Secret lives in the correct file and encrypts with SOPS; file ownership matches service user where applicable.
- Module references are gated by `secureHost` and align with host toggles.
- Outputs: Updated secrets file and gated module references.
- References: `docs/constitution.md` (Secrets Map and secureHost), `docs/reference/index.md` (Secrets Map, Hosts and Roles)
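A minimal consuming-module sketch, assuming a hypothetical secret `example-api-key` stored in `env.yaml` and a hypothetical `example` service user; the sops-nix attributes (`sopsFile`, `owner`, `path`) are standard, the names around them are placeholders:
```nix
# Hypothetical module excerpt showing secureHost gating on both ends.
{ config, lib, ... }:
{
  sops.secrets.example-api-key = lib.mkIf config.my.secureHost {
    sopsFile = ../secrets/env.yaml;
    owner = "example";
  };
  services.example.environmentFile =
    lib.mkIf config.my.secureHost config.sops.secrets.example-api-key.path;
}
```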


@@ -0,0 +1,18 @@
# Playbook: Add a Server Module with mkserver
- Name: Add a reverse-proxied server module
- Purpose: Stand up a server using `modules/factories/mkserver.nix` with correct proxy and host routing.
- Prerequisites: Target host must have `my.enableProxy = true` and container support if needed; confirm `my.secureHost` for secrets.
- Inputs: Service name, desired subdomain, port, proxy type (standard/fix/private), cron needs, secrets/env vars.
- Steps:
1. Create `modules/servers/<name>.nix` and import `mkserver` options to define `enable`, `enableProxy`, `port`, `host`, `hostName`, `url`, `ip`, `enableSocket`, and `certPath` as needed.
2. Default host routing uses `my.mainServer` and `my.ips`; override `hostName`/`ip` only when the service must live elsewhere.
3. For reverse proxy behavior, select helper from `parts/core.nix`: `proxyReverse` (standard), `proxyReverseFix` (preserve host headers/websockets), or `proxyReversePrivate` (mutual TLS).
4. Place secrets/env references in the appropriate file from the secrets map and guard with `lib.mkIf config.my.secureHost`.
5. Enable the service toggle in `hosts/<host>/toggles.nix` under `servers` (and `enableProxy` if not already set); add any firewall/static ports needed (see the sketch after this playbook).
- Validation:
- Service resolves to the expected URL and IP per `my.ips` and `my.mainServer`.
- Proxy helper matches the protocol needs; SSL settings align with cert sources.
- Secrets load only on secure hosts; firewall assertions pass.
- Outputs: New server module with mkserver options and updated host toggles/firewall settings.
- References: `docs/constitution.md` (Main server and proxies, Secrets Map), `docs/reference/index.md` (Proxy rules, Module Directories, Secrets Map, Hosts and Roles)
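A minimal host-toggle sketch for step 5, assuming a hypothetical `example` service on port 8096; the attribute names mirror the mkserver fields listed above, but the exact option set is defined by the factory, not here:
```nix
# Hypothetical hosts/server/toggles.nix excerpt; values are illustrative.
{
  my = {
    enableProxy = true;
    servers.example = {
      enable = true;
      enableProxy = true;
      port = 8096;
      url = "example"; # subdomain; host/ip default from my.mainServer and my.ips
    };
  };
}
```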


@@ -0,0 +1,15 @@
# Playbook Template
- Name:
- Purpose:
- Prerequisites: (toggles, hosts, secureHost, required secrets)
- Inputs: (paths, options, credentials, ports)
- Steps:
1.
2.
3.
- Validation:
-
-
- Outputs:
- References: `docs/constitution.md` (relevant sections) and `docs/reference/index.md` (paths/hosts/proxies/secrets)

docs/reference/index.md

@@ -0,0 +1,65 @@
# Reference Map
## Module Directories
- apps → `modules/apps/` (desktop/workstation apps, auto-imported)
- dev → `modules/dev/` (language toolchains and dev shells, auto-imported)
- scripts → `modules/scripts/` (script units built via `mkscript`, auto-imported)
- servers → `modules/servers/` (reverse-proxied services built via `mkserver`)
- services → `modules/services/` (supporting services like syncthing, wireguard)
- shell → `modules/shell/` (shell customizations and CLI tooling)
- network → `modules/network/` (networking rules, firewall helpers)
- users → `modules/users/` (user-related options)
- nix → `modules/nix/` (Nix configuration and helpers)
- patches → `patches/` (patch artifacts referenced by modules)
- factories → `modules/factories/` (`mkserver.nix`, `mkscript.nix` shared helpers)
## Auto-Import Rules
- Source: `modules/modules.nix` uses `inputs.self.lib.autoImport` to load `.nix` files from module directories.
- Filter: Excludes `librewolf.nix`; all other `.nix` files in target dirs are loaded automatically.
- Implication: Place new modules in the correct category directory with a `.nix` filename; no manual import wiring required unless adding a new factory.
## Hosts and Roles
- Configs: `hosts/<name>/configuration.nix` with toggles in `hosts/<name>/toggles.nix`.
- Active hosts: `workstation`, `server`, `miniserver`, `galaxy`, `emacs`.
- Roles:
- workstation: developer desktop; provides build power for distributed builds.
- server: primary services host (overrides `my.mainServer = "server"` and enables proxies/containers).
- miniserver: small-footprint server; default `mainServer` in shared options.
- galaxy: small server variant using nixpkgs-small.
- emacs: VM profile, `my.secureHost = false` for secret-free usage.
- Network maps: `my.ips` and `my.interfaces` declared in `modules/modules.nix`; host toggles may override.
## Proxy, Firewall, and Networking
- Proxy enablement: `my.enableProxy` toggles Nginx reverse proxy; assertions require at least one `my.servers.*.enableProxy` when enabled.
- Proxy helpers: use `parts/core.nix` helpers (`proxy`, `proxyReverse`, `proxyReverseFix` for header preservation, `proxyReversePrivate` for mutual TLS). `mkserver` supplies `host`, `ip`, `url`, and `enableProxy` defaults per service.
- Main server selection: `my.mainServer` chooses where services live by default; `mkserver` sets `isLocal` based on this and picks IPs from `my.ips`.
- Firewall generation: `inputs.self.lib.generateFirewallPorts` combines static ports, additional ports, and service ports from `my.servers` (excluding native firewall services). Use `my.network.firewall` settings and `getServicesWithNativeFirewall` to derive open ports.
## Secrets Map
- Files and purposes:
- `secrets/certs.yaml` → certificates and TLS material.
- `secrets/env.yaml` → environment variables for services (e.g., lidarr-mb-gap).
- `secrets/gallery.yaml` → media/gallery credentials.
- `secrets/homepage.yaml` → homepage widget secrets.
- `secrets/keys.yaml` → SSH/private keys and key ownership.
- `secrets/wireguard.yaml` → WireGuard peers and private keys.
- `secrets/secrets.yaml` → default SOPS file (general secrets, fallback when unspecified).
- `secrets/ssh/` → host SSH keys and related artifacts.
- secureHost: Only hosts with `my.secureHost = true` consume SOPS entries and WireGuard interfaces. Keep secret references behind `lib.mkIf config.my.secureHost`.
## Stylix and Theming
- Stylix module: `config/stylix.nix` and stylix inputs in `flake.nix` apply theming. Host toggle `my.stylix.enable` controls activation (see host toggles).
- Schemes and assets: Imported via Stylix inputs; wallpapers/fonts sourced from external flakes (`wallpapers`, `fonts`).
## Playbooks and Templates
- Playbook template: `docs/playbooks/template.md`
- Workflows: `docs/playbooks/add-module.md`, `add-server.md`, `add-script.md`, `add-host-toggle.md`, `add-secret.md`
- Constitution link-back: `docs/constitution.md` sections on terminology, proxies, secrets, and maintenance.
- MCP server reference: `docs/reference/mcp-server.md` (tool catalog, invocation, syncDocs)
## Quick Audit Checklist
- Module coverage: All categories (apps, dev, scripts, servers, services, shell, network, users, nix, patches) have corresponding entries and auto-import rules.
- Host coverage: Active hosts listed with roles and secureHost status; `mainServer` noted.
- Proxy rules: `enableProxy` usage, proxy helper selection, and `my.ips` mappings documented.
- Secrets map: Every secrets file and secureHost gating captured; new secret types aligned to file purposes.
- Discoverability: Paths reachable within two clicks from `docs/constitution.md`.


@@ -0,0 +1,28 @@
# MCP Server Reference
## Overview
- Purpose: local-only MCP server that exposes repository maintenance helpers to Codex CLI.
- Transport: JSON-RPC over stdio; no network listeners; enforced local-only guard.
- Source: `scripts/mcp-server/`; connect via `python -m mcp_server.server`.
## Tool Catalog
- `show-constitution`: Display `docs/constitution.md` to confirm authoritative rules.
- `list-playbooks`: List available playbooks under `docs/playbooks/` for common tasks.
- `show-reference`: Show `docs/reference/index.md` to navigate repo guidance.
- `search-docs`: Search the docs set for a query (param: `query`).
- `list-mcp-tasks`: Show MCP feature task list from `specs/001-mcp-server/tasks.md`.
## Invocation
- Start server: `python -m mcp_server.server` (from repo root, stdio mode).
- Codex CLI: configure MCP endpoint as local stdio, then call `listTools` to verify catalog.
- Invoke: `invokeTool` with `name` and `args` as defined above.
- Drift check: call `syncDocs` to report mismatches between tool catalog and documented anchors.
## Local-Only Expectations
- Remote access is blocked by guard clauses; when `SSH_CONNECTION` is unset, the session is treated as local and local-only behavior applies.
- If `MCP_ALLOW_REMOTE` is set to `true/1/yes`, the guard is relaxed (not recommended).
## Maintenance
- Update tool definitions in `scripts/mcp-server/src/mcp_server/tools.py` with doc anchors.
- Keep docs aligned by updating this reference and running `syncDocs`.
- CI: `.gitea/workflows/mcp-tests.yml` runs lint/format/mypy/pytest with a 60s budget on `scripts/**` and `docs/**` changes.


@@ -6,7 +6,7 @@
'(flycheck-flake8-maximum-line-length 88)
'(safe-local-variable-values
'((org-hugo-auto-export-on-save . t)
(org-hugo-base-dir . /home/jawz/Development/Websites/portfolio/)
(org-hugo-base-dir . "~/Development/Websites/portfolio/")
(git-commit-major-mode . git-commit-elisp-text-mode))))
(custom-set-faces
;; custom-set-faces was added by Custom.


@@ -1,38 +1,60 @@
{
config,
inputs,
lib,
pkgs,
...
}:
let
cfg = config.my.environments.gnome;
in
{
qt.enable = true;
services = {
gvfs.enable = true;
displayManager.gdm.enable = true;
desktopManager.gnome.enable = true;
options.my.environments.gnome = {
enable = lib.mkOption {
type = lib.types.bool;
default = true;
description = "Enable GNOME desktop environment";
};
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = "jawz";
description = "Users to install GNOME extensions for";
};
};
environment.gnome.excludePackages = builtins.attrValues {
inherit (pkgs)
baobab
cheese
epiphany
gnome-characters
gnome-connections
gnome-font-viewer
gnome-photos
# gnome-text-editor
gnome-tour
yelp
gnome-music
totem
;
};
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs.gnomeExtensions)
tactile # window manager
freon # hardware temperature monitor
gamemode-shell-extension # I guess I'm a gamer now?
burn-my-windows # special effects for when closing windows
pano # clipboard manager
pop-shell
;
config = lib.mkIf cfg.enable {
qt.enable = true;
services = {
gvfs.enable = true;
displayManager.gdm.enable = true;
desktopManager.gnome.enable = true;
};
environment.gnome.excludePackages = builtins.attrValues {
inherit (pkgs)
baobab
cheese
epiphany
gnome-characters
gnome-connections
gnome-font-viewer
gnome-photos
# gnome-text-editor
gnome-tour
yelp
gnome-music
totem
;
};
users.users = inputs.self.lib.mkUserPackages lib cfg.users (
builtins.attrValues {
inherit (pkgs.gnomeExtensions)
tactile # window manager
freon # hardware temperature monitor
gamemode-shell-extension # I guess I'm a gamer now?
burn-my-windows # special effects for when closing windows
pano # clipboard manager
pop-shell
;
}
);
};
}

flake.lock (generated)

@@ -20,11 +20,11 @@
]
},
"locked": {
"lastModified": 1764714051,
"narHash": "sha256-AjcMlM3UoavFoLzr0YrcvsIxALShjyvwe+o7ikibpCM=",
"lastModified": 1769428758,
"narHash": "sha256-0G/GzF7lkWs/yl82bXuisSqPn6sf8YGTnbEdFOXvOfU=",
"owner": "hyprwm",
"repo": "aquamarine",
"rev": "a43bedcceced5c21ad36578ed823e6099af78214",
"rev": "def5e74c97370f15949a67c62e61f1459fcb0e15",
"type": "github"
},
"original": {
@@ -54,17 +54,17 @@
"base16-fish": {
"flake": false,
"locked": {
"lastModified": 1754405784,
"narHash": "sha256-l9xHIy+85FN+bEo6yquq2IjD1rSg9fjfjpyGP1W8YXo=",
"lastModified": 1765809053,
"narHash": "sha256-XCUQLoLfBJ8saWms2HCIj4NEN+xNsWBlU1NrEPcQG4s=",
"owner": "tomyun",
"repo": "base16-fish",
"rev": "23ae20a0093dca0d7b39d76ba2401af0ccf9c561",
"rev": "86cbea4dca62e08fb7fd83a70e96472f92574782",
"type": "github"
},
"original": {
"owner": "tomyun",
"repo": "base16-fish",
"rev": "23ae20a0093dca0d7b39d76ba2401af0ccf9c561",
"rev": "86cbea4dca62e08fb7fd83a70e96472f92574782",
"type": "github"
}
},
@@ -182,11 +182,11 @@
"firefox-gnome-theme": {
"flake": false,
"locked": {
"lastModified": 1764724327,
"narHash": "sha256-OkFLrD3pFR952TrjQi1+Vdj604KLcMnkpa7lkW7XskI=",
"lastModified": 1764873433,
"narHash": "sha256-1XPewtGMi+9wN9Ispoluxunw/RwozuTRVuuQOmxzt+A=",
"owner": "rafaelmardojai",
"repo": "firefox-gnome-theme",
"rev": "66b7c635763d8e6eb86bd766de5a1e1fbfcc1047",
"rev": "f7ffd917ac0d253dbd6a3bf3da06888f57c69f92",
"type": "github"
},
"original": {
@@ -198,15 +198,15 @@
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1761588595,
"narHash": "sha256-XKUZz9zewJNUj46b4AJdiRZJAvSZ0Dqj2BNfXvFlJC4=",
"owner": "edolstra",
"lastModified": 1767039857,
"narHash": "sha256-vNpUSpF5Nuw8xvDLj2KCwwksIbjua2LZCqhV1LNRDns=",
"owner": "NixOS",
"repo": "flake-compat",
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
"rev": "5edf11c44bc78a0d334f6334cdaf7d60d732daab",
"type": "github"
},
"original": {
"owner": "edolstra",
"owner": "NixOS",
"repo": "flake-compat",
"type": "github"
}
@@ -216,11 +216,11 @@
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1763759067,
"narHash": "sha256-LlLt2Jo/gMNYAwOgdRQBrsRoOz7BPRkzvNaI/fzXi2Q=",
"lastModified": 1768135262,
"narHash": "sha256-PVvu7OqHBGWN16zSi6tEmPwwHQ4rLPU9Plvs8/1TUBY=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "2cccadc7357c0ba201788ae99c4dfa90728ef5e0",
"rev": "80daad04eddbbf5a4d883996a73f3f542fa437ac",
"type": "github"
},
"original": {
@@ -234,11 +234,11 @@
"nixpkgs-lib": "nixpkgs-lib_2"
},
"locked": {
"lastModified": 1763759067,
"narHash": "sha256-LlLt2Jo/gMNYAwOgdRQBrsRoOz7BPRkzvNaI/fzXi2Q=",
"lastModified": 1768135262,
"narHash": "sha256-PVvu7OqHBGWN16zSi6tEmPwwHQ4rLPU9Plvs8/1TUBY=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "2cccadc7357c0ba201788ae99c4dfa90728ef5e0",
"rev": "80daad04eddbbf5a4d883996a73f3f542fa437ac",
"type": "github"
},
"original": {
@@ -293,11 +293,11 @@
]
},
"locked": {
"lastModified": 1763759067,
"narHash": "sha256-LlLt2Jo/gMNYAwOgdRQBrsRoOz7BPRkzvNaI/fzXi2Q=",
"lastModified": 1767609335,
"narHash": "sha256-feveD98mQpptwrAEggBQKJTYbvwwglSbOv53uCfH9PY=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "2cccadc7357c0ba201788ae99c4dfa90728ef5e0",
"rev": "250481aafeb741edfe23d29195671c19b36b6dca",
"type": "github"
},
"original": {
@@ -324,6 +324,24 @@
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_4"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"fonts": {
"flake": false,
"locked": {
@@ -382,11 +400,11 @@
"flake": false,
"locked": {
"host": "gitlab.gnome.org",
"lastModified": 1764524476,
"narHash": "sha256-bTmNn3Q4tMQ0J/P0O5BfTQwqEnCiQIzOGef9/aqAZvk=",
"lastModified": 1767737596,
"narHash": "sha256-eFujfIUQDgWnSJBablOuG+32hCai192yRdrNHTv0a+s=",
"owner": "GNOME",
"repo": "gnome-shell",
"rev": "c0e1ad9f0f703fd0519033b8f46c3267aab51a22",
"rev": "ef02db02bf0ff342734d525b5767814770d85b49",
"type": "gitlab"
},
"original": {
@@ -404,11 +422,11 @@
]
},
"locked": {
"lastModified": 1765170903,
"narHash": "sha256-O8VTGey1xxiRW+Fpb+Ps9zU7ShmxUA1a7cMTcENCVNg=",
"lastModified": 1769580047,
"narHash": "sha256-tNqCP/+2+peAXXQ2V8RwsBkenlfWMERb+Uy6xmevyhM=",
"owner": "nix-community",
"repo": "home-manager",
"rev": "20561be440a11ec57a89715480717baf19fe6343",
"rev": "366d78c2856de6ab3411c15c1cb4fb4c2bf5c826",
"type": "github"
},
"original": {
@@ -463,11 +481,11 @@
]
},
"locked": {
"lastModified": 1763733840,
"narHash": "sha256-JnET78yl5RvpGuDQy3rCycOCkiKoLr5DN1fPhRNNMco=",
"lastModified": 1769284023,
"narHash": "sha256-xG34vwYJ79rA2wVC8KFuM8r36urJTG6/csXx7LiiSYU=",
"owner": "hyprwm",
"repo": "hyprgraphics",
"rev": "8f1bec691b2d198c60cccabca7a94add2df4ed1a",
"rev": "13c536659d46893596412d180449353a900a1d31",
"type": "github"
},
"original": {
@@ -495,11 +513,11 @@
"xdph": "xdph"
},
"locked": {
"lastModified": 1765141510,
"narHash": "sha256-IjlKl72fJ40zZFiag9VTF37249jHCRHAE4RP7bI0OXA=",
"lastModified": 1769694617,
"narHash": "sha256-h8+Wqc4x68mN2qOLX45HsO6Z4eQOfrdtSKiSzcBrCVg=",
"owner": "hyprwm",
"repo": "Hyprland",
"rev": "a5b7c91329313503e8864761f24ef43fb630f35c",
"rev": "c92fb5e85f4a5fd3a0f5ffb5892f6a61cfe1be2b",
"type": "github"
},
"original": {
@@ -541,11 +559,11 @@
]
},
"locked": {
"lastModified": 1764812575,
"narHash": "sha256-1bK1yGgaR82vajUrt6z+BSljQvFn91D74WJ/vJsydtE=",
"lastModified": 1767023960,
"narHash": "sha256-R2HgtVS1G3KSIKAQ77aOZ+Q0HituOmPgXW9nBNkpp3Q=",
"owner": "hyprwm",
"repo": "hyprland-guiutils",
"rev": "fd321368a40c782cfa299991e5584ca338e36ebe",
"rev": "c2e906261142f5dd1ee0bfc44abba23e2754c660",
"type": "github"
},
"original": {
@@ -566,11 +584,11 @@
]
},
"locked": {
"lastModified": 1759610243,
"narHash": "sha256-+KEVnKBe8wz+a6dTLq8YDcF3UrhQElwsYJaVaHXJtoI=",
"lastModified": 1765214753,
"narHash": "sha256-P9zdGXOzToJJgu5sVjv7oeOGPIIwrd9hAUAP3PsmBBs=",
"owner": "hyprwm",
"repo": "hyprland-protocols",
"rev": "bd153e76f751f150a09328dbdeb5e4fab9d23622",
"rev": "3f3860b869014c00e8b9e0528c7b4ddc335c21ab",
"type": "github"
},
"original": {
@@ -595,11 +613,11 @@
]
},
"locked": {
"lastModified": 1764612430,
"narHash": "sha256-54ltTSbI6W+qYGMchAgCR6QnC1kOdKXN6X6pJhOWxFg=",
"lastModified": 1767983607,
"narHash": "sha256-8C2co8NYfR4oMOUEsPROOJ9JHrv9/ktbJJ6X1WsTbXc=",
"owner": "hyprwm",
"repo": "hyprlang",
"rev": "0d00dc118981531aa731150b6ea551ef037acddd",
"rev": "d4037379e6057246b408bbcf796cf3e9838af5b2",
"type": "github"
},
"original": {
@@ -672,11 +690,11 @@
]
},
"locked": {
"lastModified": 1764962281,
"narHash": "sha256-rGbEMhTTyTzw4iyz45lch5kXseqnqcEpmrHdy+zHsfo=",
"lastModified": 1766253372,
"narHash": "sha256-1+p4Kw8HdtMoFSmJtfdwjxM4bPxDK9yg27SlvUMpzWA=",
"owner": "hyprwm",
"repo": "hyprutils",
"rev": "fe686486ac867a1a24f99c753bb40ffed338e4b0",
"rev": "51a4f93ce8572e7b12b7284eb9e6e8ebf16b4be9",
"type": "github"
},
"original": {
@@ -726,11 +744,11 @@
]
},
"locked": {
"lastModified": 1764872015,
"narHash": "sha256-INI9AVrQG5nJZFvGPSiUZ9FEUZJLfGdsqjF1QSak7Gc=",
"lastModified": 1769202094,
"narHash": "sha256-gdJr/vWWLRW85ucatSjoBULPB2dqBJd/53CZmQ9t91Q=",
"owner": "hyprwm",
"repo": "hyprwire",
"rev": "7997451dcaab7b9d9d442f18985d514ec5891608",
"rev": "a45ca05050d22629b3c7969a926d37870d7dd75c",
"type": "github"
},
"original": {
@@ -747,11 +765,11 @@
"sudoku-solver": "sudoku-solver"
},
"locked": {
"lastModified": 1764529970,
"narHash": "sha256-XskTPGgQJlMXMpiD16J+EyG7G01SwybwK0MXgsfqi5E=",
"lastModified": 1768598739,
"narHash": "sha256-xBX3qJoJowBg80ZPTZ6RvoOkcrIY/RIxBYhq9XtrN8g=",
"ref": "refs/heads/master",
"rev": "e40d6fc2bb35c360078d8523b987c071591357c3",
"revCount": 122,
"rev": "155967f8e9b1018766bbbe85baaedde3156b79ee",
"revCount": 126,
"type": "git",
"url": "https://git.lebubu.org/jawz/scripts.git"
},
@@ -788,11 +806,11 @@
]
},
"locked": {
"lastModified": 1765073338,
"narHash": "sha256-UGkNtTs0E1SzskcUkkkWoh3vfZwPiHrk0SMRoQL86oE=",
"lastModified": 1769740349,
"narHash": "sha256-Tbk4SF5XhM9fnrDtPl4wy3ItkjRMcBTVuA26ThzLVcs=",
"owner": "fufexan",
"repo": "nix-gaming",
"rev": "7480cfb8bba3e352edf2c9334ff4b7c3ac84eb87",
"rev": "cd0a8141f410a6532a76546df2665a4e3c93b69b",
"type": "github"
},
"original": {
@@ -855,11 +873,11 @@
},
"nixpkgs-lib": {
"locked": {
"lastModified": 1761765539,
"narHash": "sha256-b0yj6kfvO8ApcSE+QmA6mUfu8IYG6/uU28OFn4PaC8M=",
"lastModified": 1765674936,
"narHash": "sha256-k00uTP4JNfmejrCLJOwdObYC9jHRrr/5M/a/8L2EIdo=",
"owner": "nix-community",
"repo": "nixpkgs.lib",
"rev": "719359f4562934ae99f5443f20aa06c2ffff91fc",
"rev": "2075416fcb47225d9b68ac469a5c4801a9c4dd85",
"type": "github"
},
"original": {
@@ -870,11 +888,11 @@
},
"nixpkgs-lib_2": {
"locked": {
"lastModified": 1761765539,
"narHash": "sha256-b0yj6kfvO8ApcSE+QmA6mUfu8IYG6/uU28OFn4PaC8M=",
"lastModified": 1765674936,
"narHash": "sha256-k00uTP4JNfmejrCLJOwdObYC9jHRrr/5M/a/8L2EIdo=",
"owner": "nix-community",
"repo": "nixpkgs.lib",
"rev": "719359f4562934ae99f5443f20aa06c2ffff91fc",
"rev": "2075416fcb47225d9b68ac469a5c4801a9c4dd85",
"type": "github"
},
"original": {
@@ -903,11 +921,11 @@
},
"nixpkgs-small": {
"locked": {
"lastModified": 1765178948,
"narHash": "sha256-Kb3mIrj4xLg2LeMvok0tpiGPis1VnrNJO0l4kW+0xmc=",
"lastModified": 1769724120,
"narHash": "sha256-6DBBx8SJSOU/RPSoy2kWBzRRjxZR2quC5ema5TJ1zVg=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "f376a52d0dc796aec60b5606a2676240ff1565b9",
"rev": "8ec59ed5093c2a742d7744e9ecf58f358aa4a87d",
"type": "github"
},
"original": {
@@ -919,11 +937,11 @@
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1764950072,
"narHash": "sha256-BmPWzogsG2GsXZtlT+MTcAWeDK5hkbGRZTeZNW42fwA=",
"lastModified": 1769461804,
"narHash": "sha256-msG8SU5WsBUfVVa/9RPLaymvi5bI8edTavbIq3vRlhI=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "f61125a668a320878494449750330ca58b78c557",
"rev": "bfc1b8a4574108ceef22f02bafcf6611380c100d",
"type": "github"
},
"original": {
@@ -935,11 +953,11 @@
},
"nixpkgs_2": {
"locked": {
"lastModified": 1764983851,
"narHash": "sha256-y7RPKl/jJ/KAP/VKLMghMgXTlvNIJMHKskl8/Uuar7o=",
"lastModified": 1769598131,
"narHash": "sha256-e7VO/kGLgRMbWtpBqdWl0uFg8Y2XWFMdz0uUJvlML8o=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "d9bc5c7dceb30d8d6fafa10aeb6aa8a48c218454",
"rev": "fa83fd837f3098e3e678e6cf017b2b36102c7211",
"type": "github"
},
"original": {
@@ -978,11 +996,11 @@
]
},
"locked": {
"lastModified": 1765185832,
"narHash": "sha256-z8duEjztk7g+Zm4DbZfAAYMAqb+ooaNPuOBhpvx7TiU=",
"lastModified": 1769764253,
"narHash": "sha256-lkjNGrUfTG1RR1AjvDqaYJcWsEkOhUz0w/U8tD0sjmk=",
"owner": "nix-community",
"repo": "nur",
"rev": "7be17d29475559cb8d7e35b5ed185b5a8ed8d7b6",
"rev": "db595036b2efc5f9de5053e6c5bdbf730ffe6f70",
"type": "github"
},
"original": {
@@ -1003,11 +1021,11 @@
]
},
"locked": {
"lastModified": 1764773531,
"narHash": "sha256-mCBl7MD1WZ7yCG6bR9MmpPO2VydpNkWFgnslJRIT1YU=",
"lastModified": 1767886815,
"narHash": "sha256-pB2BBv6X9cVGydEV/9Y8+uGCvuYJAlsprs1v1QHjccA=",
"owner": "nix-community",
"repo": "NUR",
"rev": "1d9616689e98beded059ad0384b9951e967a17fa",
"rev": "4ff84374d77ff62e2e13a46c33bfeb73590f9fef",
"type": "github"
},
"original": {
@@ -1026,11 +1044,11 @@
]
},
"locked": {
"lastModified": 1765016596,
"narHash": "sha256-rhSqPNxDVow7OQKi4qS5H8Au0P4S3AYbawBSmJNUtBQ=",
"lastModified": 1769069492,
"narHash": "sha256-Efs3VUPelRduf3PpfPP2ovEB4CXT7vHf8W+xc49RL/U=",
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "548fc44fca28a5e81c5d6b846e555e6b9c2a5a3c",
"rev": "a1ef738813b15cf8ec759bdff5761b027e3e1d23",
"type": "github"
},
"original": {
@@ -1039,6 +1057,27 @@
"type": "github"
}
},
"prem2resolve": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1769712701,
"narHash": "sha256-R5IOg12d7lJwaA3qmC7pQBNgUbmCDo5NHSCY/O95VQA=",
"ref": "refs/heads/main",
"rev": "f7b65d0b8f3d010ce6da4afe9dbd16b864260ae0",
"revCount": 5,
"type": "git",
"url": "https://git.lebubu.org/vibe-coded/prem2resolve.git"
},
"original": {
"type": "git",
"url": "https://git.lebubu.org/vibe-coded/prem2resolve.git"
}
},
"qbit_manage": {
"flake": false,
"locked": {
@@ -1071,6 +1110,7 @@
"nixpkgs-unstable": "nixpkgs-unstable",
"nixtendo-switch": "nixtendo-switch",
"nur": "nur",
"prem2resolve": "prem2resolve",
"qbit_manage": "qbit_manage",
"sops-nix": "sops-nix",
"stylix": "stylix",
@@ -1085,11 +1125,11 @@
]
},
"locked": {
"lastModified": 1765079830,
"narHash": "sha256-i9GMbBLkeZ7MVvy7+aAuErXkBkdRylHofrAjtpUPKt8=",
"lastModified": 1769469829,
"narHash": "sha256-wFcr32ZqspCxk4+FvIxIL0AZktRs6DuF8oOsLt59YBU=",
"owner": "Mic92",
"repo": "sops-nix",
"rev": "aeb517262102f13683d7a191c7e496b34df8d24c",
"rev": "c5eebd4eb2e3372fe12a8d70a248a6ee9dd02eff",
"type": "github"
},
"original": {
@@ -1111,7 +1151,7 @@
"nixpkgs"
],
"nur": "nur_2",
"systems": "systems_4",
"systems": "systems_5",
"tinted-foot": "tinted-foot",
"tinted-kitty": "tinted-kitty",
"tinted-schemes": "tinted-schemes",
@@ -1119,11 +1159,11 @@
"tinted-zed": "tinted-zed"
},
"locked": {
"lastModified": 1765065096,
"narHash": "sha256-abrrONk8vzRtY6fHEkjZOyRJpKHjPlFqMBE0+/DxfAU=",
"lastModified": 1769472288,
"narHash": "sha256-RdnbroWsujYh1MaMhDpP5QM+bRIGG6smz987v1fli+U=",
"owner": "danth",
"repo": "stylix",
"rev": "84d9d55885d463d461234f3aac07b2389a2577d8",
"rev": "c2c4a3ad52c096db1c8dde97d3d21451613f000c",
"type": "github"
},
"original": {
@@ -1210,6 +1250,21 @@
"type": "github"
}
},
"systems_5": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"tinted-foot": {
"flake": false,
"locked": {
@@ -1246,11 +1301,11 @@
"tinted-schemes": {
"flake": false,
"locked": {
"lastModified": 1763914658,
"narHash": "sha256-Hju0WtMf3iForxtOwXqGp3Ynipo0EYx1AqMKLPp9BJw=",
"lastModified": 1767817087,
"narHash": "sha256-eGE8OYoK6HzhJt/7bOiNV2cx01IdIrHL7gXgjkHRdNo=",
"owner": "tinted-theming",
"repo": "schemes",
"rev": "0f6be815d258e435c9b137befe5ef4ff24bea32c",
"rev": "bd99656235aab343e3d597bf196df9bc67429507",
"type": "github"
},
"original": {
@@ -1262,11 +1317,11 @@
"tinted-tmux": {
"flake": false,
"locked": {
"lastModified": 1764465359,
"narHash": "sha256-lbSVPqLEk2SqMrnpvWuKYGCaAlfWFMA6MVmcOFJjdjE=",
"lastModified": 1767489635,
"narHash": "sha256-e6nnFnWXKBCJjCv4QG4bbcouJ6y3yeT70V9MofL32lU=",
"owner": "tinted-theming",
"repo": "tinted-tmux",
"rev": "edf89a780e239263cc691a987721f786ddc4f6aa",
"rev": "3c32729ccae99be44fe8a125d20be06f8d7d8184",
"type": "github"
},
"original": {
@@ -1278,11 +1333,11 @@
"tinted-zed": {
"flake": false,
"locked": {
"lastModified": 1764464512,
"narHash": "sha256-rCD/pAhkMdCx6blsFwxIyvBJbPZZ1oL2sVFrH07lmqg=",
"lastModified": 1767488740,
"narHash": "sha256-wVOj0qyil8m+ouSsVZcNjl5ZR+1GdOOAooAatQXHbuU=",
"owner": "tinted-theming",
"repo": "base16-zed",
"rev": "907dbba5fb8cf69ebfd90b00813418a412d0a29a",
"rev": "11abb0b282ad3786a2aae088d3a01c60916f2e40",
"type": "github"
},
"original": {

View File

@@ -22,6 +22,10 @@
url = "git+https://git.lebubu.org/jawz/scripts.git";
inputs.nixpkgs.follows = "nixpkgs";
};
prem2resolve = {
url = "git+https://git.lebubu.org/vibe-coded/prem2resolve.git";
inputs.nixpkgs.follows = "nixpkgs";
};
lidarr-mb-gap = {
url = "git+https://git.lebubu.org/vibe-coded/lidarr-mb-gap.git";
inputs.nixpkgs.follows = "nixpkgs";

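Like the other self-hosted inputs, prem2resolve is pinned to the shared nixpkgs via follows. The diff does not show where it is consumed; a minimal sketch, assuming it exposes a default package under packages.<system> (an assumption mirroring how lidarr-mb-gap is consumed further down, not this repository's actual wiring):

{ pkgs, inputs, ... }:
{
  # Hypothetical consumption of the new input; the .default attribute
  # is a guess, only the input name comes from the diff above.
  environment.systemPackages = [
    inputs.prem2resolve.packages.${pkgs.stdenv.hostPlatform.system}.default
  ];
}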
View File

@@ -21,7 +21,6 @@
secureHost = false;
stylix.enable = true;
emacs.enable = true;
apps.fonts.enable = true;
shell.tools.enable = true;
services.network.enable = true;
dev = {

View File

@@ -1,22 +1,34 @@
{ inputs }:
let
inherit (inputs.self.lib) mkEnabled mkEnabledWithProxy enableList;
inherit (inputs.self.lib)
mkEnabled
mkEnabledWithUsers
mkEnabledWithProxy
enableList
;
in
{
emacs.enable = true;
emacs = {
enable = true;
users = "jawz";
};
stylix = {
enable = true;
users = "jawz";
};
enableProxy = true;
websites.portfolio.enableProxy = true;
apps = enableList mkEnabled [
apps = enableList mkEnabledWithUsers [
"dictionaries"
];
services = enableList mkEnabled [
"network"
];
shell = enableList mkEnabled [
shell = enableList mkEnabledWithUsers [
"tools"
"multimedia"
];
dev = enableList mkEnabled [
dev = enableList mkEnabledWithUsers [
"nix"
"python"
"sh"

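From the call sites, enableList pairs a mk-function with a list of module names, so enableList mkEnabled [ "network" ] must evaluate to { network.enable = true; }. A sketch of plausible definitions; enableList and mkEnabled are fully implied by usage, while mkEnabledWithUsers' relationship to the new users options is an assumption the diff never shows:

{
  # Build { <name> = mkFn <name>; } for every listed module name.
  enableList = mkFn: names: builtins.listToAttrs (map (name: {
    inherit name;
    value = mkFn name;
  }) names);
  mkEnabled = _name: { enable = true; };
  # Assumed: behaves like mkEnabled and lets each module's users option
  # fall back to its default (config.my.toggleUsers.<category>); the
  # split into two helpers hints that the users handling differs somehow.
  mkEnabledWithUsers = _name: { enable = true; };
}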
View File

@@ -5,6 +5,9 @@
inputs,
...
}:
let
lidarrMbGapId = 968;
in
{
imports = [
inputs.lidarr-mb-gap.nixosModules.lidarr-mb-gap
@@ -27,6 +30,8 @@
3452 # sonarqube
8448 # synapse ssl
8265 # tdarr
5173 # media map
51412 # qbittorrent
];
};
nix.buildMachines = [
@@ -47,13 +52,16 @@
sopsFile = ../../secrets/env.yaml;
};
"private_keys/lidarr-mb-gap" =
lib.mkIf (config.my.secureHost && config.services.lidarr-mb-gap.enable)
{
sopsFile = ../../secrets/keys.yaml;
owner = config.users.users.lidarr-mb-gap.name;
inherit (config.users.users.lidarr-mb-gap) group;
path = "${config.users.users.lidarr-mb-gap.home}/.ssh/ed25519_lidarr-mb-gap";
};
let
cfg = config.services.lidarr-mb-gap;
usr = config.users.users.lidarr-mb-gap;
in
lib.mkIf (config.my.secureHost && cfg.enable) {
sopsFile = ../../secrets/keys.yaml;
owner = usr.name;
inherit (usr) group;
path = "${usr.home}/.ssh/ed25519_lidarr-mb-gap";
};
};
networking = {
hostName = "server";
@@ -80,6 +88,13 @@
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs) podman-compose attic-client;
};
users.groups.lidarr-mb-gap.gid = lidarrMbGapId;
users.users.lidarr-mb-gap = {
uid = lidarrMbGapId;
isSystemUser = true;
group = "lidarr-mb-gap";
home = "/var/lib/lidarr-mb-gap";
};
services = {
btrfs.autoScrub = {
enable = true;
@@ -90,7 +105,7 @@
};
lidarr-mb-gap = {
enable = true;
package = inputs.lidarr-mb-gap.packages.${pkgs.system}.lidarr-mb-gap;
package = inputs.lidarr-mb-gap.packages.${pkgs.stdenv.hostPlatform.system}.lidarr-mb-gap;
home = "/var/lib/lidarr-mb-gap";
envFile = config.sops.secrets.lidarr-mb-gap.path;
runInterval = "weekly";

View File

@@ -1,16 +1,23 @@
{ config, inputs }:
let
inherit (inputs.self.lib) mkEnabled enableList;
inherit (inputs.self.lib) mkEnabled mkEnabledWithUsers enableList;
mkEnabledIp = inputs.self.lib.mkEnabledIp config.my.ips.wg-server;
in
{
mainServer = "server";
emacs.enable = true;
stylix.enable = true;
emacs = {
enable = true;
users = "jawz";
};
stylix = {
enable = true;
users = "jawz";
};
enableProxy = true;
enableContainers = true;
apps.dictionaries.enable = true;
shell = enableList mkEnabled [
apps.dictionaries.users = "jawz";
shell = enableList mkEnabledWithUsers [
"tools"
"multimedia"
];
@@ -19,7 +26,7 @@ in
"nvidia"
"syncthing"
];
dev = enableList mkEnabled [
dev = enableList mkEnabledWithUsers [
"nix"
"python"
"sh"
@@ -81,5 +88,9 @@ in
"audiobookshelf"
"vaultwarden"
"readeck"
"keycloak"
"oauth2-proxy"
"isso"
"plausible"
];
}

View File

@@ -22,6 +22,7 @@ in
../../config/base.nix
../../config/stylix.nix
../../environments/gnome.nix
../../config/derek.nix
];
my = import ./toggles.nix { inherit inputs; } // {
nix.cores = 8;
@@ -31,6 +32,10 @@ in
"nixserver"
"nixminiserver"
];
environments.gnome.users = [
"jawz"
"bearded_dragonn"
];
};
home-manager.users.jawz.programs = {
vscode = {
@@ -123,7 +128,6 @@ in
../../secrets/ssh/root-private-ca.pem
];
services = {
minio.enable = true;
flatpak.enable = true;
open-webui.enable = true;
scx = {
@@ -143,48 +147,5 @@ in
acceleration = "cuda";
models = "/srv/ai/ollama";
};
postgresql = {
enable = true;
package = pkgs.postgresql_17;
enableTCPIP = true;
authentication = pkgs.lib.mkOverride 10 ''
local all all trust
host all all ${config.my.localhost}/32 trust
host all all ::1/128 trust
'';
ensureDatabases = [ "webref" ];
ensureUsers = [
{
name = "webref";
ensureDBOwnership = true;
}
];
};
};
programs.virt-manager.enable = true;
users.groups.libvirtd.members = [ "jawz" ];
virtualisation.libvirtd.enable = true;
systemd.services.minio-init = {
description = "Initialize MinIO buckets";
after = [ "minio.service" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
};
script = ''
# Wait for MinIO to be ready
until ${pkgs.curl}/bin/curl -sf http://localhost:9000/minio/health/live > /dev/null 2>&1; do
echo "Waiting for MinIO..."
sleep 1
done
# Configure mc alias and create bucket
${pkgs.minio-client}/bin/mc alias set local http://localhost:9000 minioadmin minioadmin || true
${pkgs.minio-client}/bin/mc mb local/webref || true
${pkgs.minio-client}/bin/mc anonymous set public local/webref || true
echo "MinIO initialized with webref bucket"
'';
};
}

View File

@@ -1,29 +1,37 @@
{ inputs }:
let
inherit (inputs.self.lib) mkEnabled enableList;
inherit (inputs.self.lib) mkEnabled mkEnabledWithUsers enableList;
in
{
stylix.enable = true;
emacs.enable = true;
stylix = {
enable = true;
users = "jawz";
};
emacs = {
enable = true;
users = "jawz";
};
enableContainers = true;
servers.drpp.enable = true;
apps = enableList mkEnabled [
"art"
"dictionaries"
"fonts"
"gaming"
"switch"
"internet"
"multimedia"
"office"
"misc"
];
dev = enableList mkEnabled [
apps =
(enableList mkEnabledWithUsers [
"art"
"dictionaries"
"gaming"
"internet"
"multimedia"
"office"
"misc"
])
// {
switch.enable = true;
};
dev = enableList mkEnabledWithUsers [
"nix"
"python"
"sh"
];
shell = enableList mkEnabled [
shell = enableList mkEnabledWithUsers [
"exercism"
"multimedia"
"tools"

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -36,8 +37,19 @@ let
in
{
options.my = {
apps.art.enable = lib.mkEnableOption "digital art and creative applications";
apps.art = {
enable = lib.mkEnableOption "digital art and creative applications";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install art packages for";
};
};
dev.gameDev.enable = lib.mkEnableOption "game development tools and engines";
};
config.users.users.jawz.packages = artPackages ++ gameDevPackages;
config.users.users =
let
packages = artPackages ++ gameDevPackages;
in
inputs.self.lib.mkUserPackages lib config.my.apps.art.users packages;
}

View File

@@ -1,21 +1,32 @@
{
config,
inputs,
lib,
pkgs,
...
}:
let
packages = builtins.attrValues {
inherit (pkgs)
hunspell
;
inherit (pkgs.hunspellDicts)
it_IT
es_MX
en_CA-large
;
};
in
{
options.my.apps.dictionaries.enable = lib.mkEnableOption "dictionaries and language tools";
config = lib.mkIf config.my.apps.dictionaries.enable {
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
hunspell
;
inherit (pkgs.hunspellDicts)
it_IT
es_MX
en_CA-large
;
options.my.apps.dictionaries = {
enable = lib.mkEnableOption "dictionaries and language tools";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install dictionaries packages for";
};
};
config = lib.mkIf config.my.apps.dictionaries.enable {
users.users = inputs.self.lib.mkUserPackages lib config.my.apps.dictionaries.users packages;
};
}

View File

@@ -1,44 +0,0 @@
{
config,
lib,
pkgs,
inputs,
...
}:
let
customFonts = pkgs.stdenvNoCC.mkDerivation {
name = "custom-fonts";
src = inputs.fonts;
installPhase = ''
mkdir -p $out/share/fonts
find $src -type f \( \
-name "*.ttf" -o \
-name "*.otf" -o \
-name "*.woff" -o \
-name "*.woff2" \
\) -exec cp {} $out/share/fonts/ \;
'';
};
in
{
options.my.apps.fonts.enable = lib.mkEnableOption "additional fonts and typography";
config = lib.mkIf config.my.apps.fonts.enable {
nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [ "corefonts" ];
fonts.packages = builtins.attrValues {
inherit customFonts;
inherit (pkgs)
symbola
comic-neue
cascadia-code
corefonts
;
inherit (pkgs.nerd-fonts)
caskaydia-cove
open-dyslexic
comic-shanns-mono
iosevka
agave
;
};
};
}

View File

@@ -1,6 +1,6 @@
{
inputs,
config,
inputs,
lib,
pkgs,
...
@@ -23,7 +23,14 @@ in
{
imports = [ inputs.nix-gaming.nixosModules.platformOptimizations ];
options.my.apps = {
gaming.enable = lib.mkEnableOption "gaming applications and emulators";
gaming = {
enable = lib.mkEnableOption "gaming applications and emulators";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install gaming packages for";
};
};
switch.enable = lib.mkEnableOption "Nintendo Switch homebrew tools";
};
config = lib.mkIf config.my.apps.gaming.enable {
@@ -50,23 +57,28 @@ in
# environmentFile = config.sops.secrets.switch-presence.path;
# };
};
users.users.jawz.packages = builtins.attrValues {
inherit retroarchWithCores;
inherit (pkgs)
shipwright # zelda OoT port
mangohud # fps & stats overlay
lutris # games launcher & emulator hub
cartridges # games launcher
gamemode # optimizes linux to have better gaming performance
heroic # install epic games
protonup-qt # update proton-ge
ns-usbloader # load games into my switch
# emulators
rpcs3 # ps3
cemu # wii u
ryubing # switch
prismlauncher # minecraft launcher with jdk overlays
;
};
users.users =
let
packages = builtins.attrValues {
inherit retroarchWithCores;
inherit (pkgs)
mgba # gba emulator
shipwright # zelda OoT port
mangohud # fps & stats overlay
lutris # games launcher & emulator hub
cartridges # games launcher
gamemode # optimizes linux to have better gaming performance
heroic # install epic games
protonup-qt # update proton-ge
ns-usbloader # load games into my switch
# emulators
rpcs3 # ps3
cemu # wii u
ryubing # switch
prismlauncher # minecraft launcher with jdk overlays
;
};
in
inputs.self.lib.mkUserPackages lib config.my.apps.gaming.users packages;
};
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -27,26 +28,39 @@ let
krisp-patcher = pkgs.writers.writePython3Bin "krisp-patcher" krisp-settings krisp-patch;
in
{
options.my.apps.internet.enable = lib.mkEnableOption "internet browsers and communication apps";
config = lib.mkIf config.my.apps.internet.enable {
home-manager.users.jawz.programs.librewolf = import ./librewolf.nix;
programs.geary.enable = true;
users.users.jawz.packages = builtins.attrValues {
# inherit (inputs.zen-browser.packages.x86_64-linux) twilight;
inherit krisp-patcher;
inherit (pkgs)
# thunderbird # email client
warp # transfer files with based ppl
nextcloud-client # self-hosted google-drive alternative
fragments # beautiful torrent client
tor-browser # dark web, so dark!
telegram-desktop # furry chat
nicotine-plus # remember Ares?
discord # :3
vdhcoapp # video download helper assistant
nextcloud-talk-desktop # nextcloud talk client
fractal # matrix client
;
options.my.apps.internet = {
enable = lib.mkEnableOption "internet browsers and communication apps";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install internet packages for";
};
};
config = lib.mkIf config.my.apps.internet.enable {
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.apps.internet.users (_user: {
programs.librewolf = import ./librewolf.nix;
});
programs.geary.enable = true;
users.users =
let
packages = builtins.attrValues {
# inherit (inputs.zen-browser.packages.x86_64-linux) twilight;
inherit krisp-patcher;
inherit (pkgs)
# thunderbird # email client
warp # transfer files with based ppl
nextcloud-client # self-hosted google-drive alternative
fragments # beautiful torrent client
tor-browser # dark web, so dark!
telegram-desktop # furry chat
nicotine-plus # remember Ares?
discord # :3
vdhcoapp # video download helper assistant
nextcloud-talk-desktop # nextcloud talk client
fractal # matrix client
;
};
in
inputs.self.lib.mkUserPackages lib config.my.apps.internet.users packages;
};
}

View File

@@ -1,19 +1,31 @@
{
config,
inputs,
lib,
pkgs,
...
}:
{
options.my.apps.misc.enable = lib.mkEnableOption "miscellaneous desktop applications";
config = lib.mkIf config.my.apps.misc.enable {
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
blanket # background noise
metadata-cleaner # remove any metadata and geolocation from files
pika-backup # backups
gnome-obfuscate # censor private information
;
options.my.apps.misc = {
enable = lib.mkEnableOption "miscellaneous desktop applications";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install misc packages for";
};
};
config = lib.mkIf config.my.apps.misc.enable {
users.users =
let
packages = builtins.attrValues {
inherit (pkgs)
blanket # background noise
metadata-cleaner # remove any metadata and geolocation from files
pika-backup # backups
gnome-obfuscate # censor private information
;
};
in
inputs.self.lib.mkUserPackages lib config.my.apps.misc.users packages;
};
}

View File

@@ -1,21 +1,55 @@
{
config,
inputs,
lib,
pkgs,
...
}:
let
cfg = config.my.apps.multimedia;
attrValuesIf = cond: attrs: if cond then builtins.attrValues attrs else [ ];
multimediaPackages = attrValuesIf cfg.enable {
inherit (pkgs)
curtail # image compressor
easyeffects # equalizer
identity # compare images or videos
mousai # poor man shazam
shortwave # listen to world radio
tagger # tag music files
;
};
videoEditingPackages = attrValuesIf cfg.videoEditing.enable {
inherit (pkgs)
davinci-resolve
shotcut
pitivi
;
inherit (pkgs.kdePackages)
kdenlive
;
};
in
{
options.my.apps.multimedia.enable = lib.mkEnableOption "multimedia applications and media players";
config = lib.mkIf config.my.apps.multimedia.enable {
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
curtail # image compressor
easyeffects # equalizer
identity # compare images or videos
mousai # poor man shazam
shortwave # listen to world radio
tagger # tag music files
;
options.my.apps.multimedia = {
enable = lib.mkEnableOption "multimedia applications and media players";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install multimedia packages for";
};
videoEditing = {
enable = lib.mkEnableOption "video editing applications";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install video editing packages for";
};
};
};
config = lib.mkIf (cfg.enable || cfg.videoEditing.enable) {
users.users = lib.mkMerge [
(inputs.self.lib.mkUserPackages lib cfg.users multimediaPackages)
(inputs.self.lib.mkUserPackages lib cfg.videoEditing.users videoEditingPackages)
];
};
}

View File

@@ -1,18 +1,30 @@
{
config,
inputs,
lib,
pkgs,
...
}:
{
options.my.apps.piano.enable = lib.mkEnableOption "piano learning and music theory apps";
config = lib.mkIf config.my.apps.piano.enable {
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
neothesia
linthesia
timidity
;
options.my.apps.piano = {
enable = lib.mkEnableOption "piano learning and music theory apps";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install piano packages for";
};
};
config = lib.mkIf config.my.apps.piano.enable {
users.users =
let
packages = builtins.attrValues {
inherit (pkgs)
neothesia
linthesia
timidity
;
};
in
inputs.self.lib.mkUserPackages lib config.my.apps.piano.users packages;
};
}

View File

@@ -1,22 +1,34 @@
{
config,
inputs,
lib,
pkgs,
...
}:
{
options.my.apps.office.enable = lib.mkEnableOption "office applications and productivity tools";
config = lib.mkIf config.my.apps.office.enable {
environment.variables.CALIBRE_USE_SYSTEM_THEME = "1";
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
jre17_minimal # for libreoffice extensions
libreoffice # office, but based & european
calibre # ugly af eBook library manager
newsflash # feed reader, syncs with nextcloud
furtherance # I packaged this one tehee track time utility
# planify # let's pretend I will organize my tasks
;
options.my.apps.office = {
enable = lib.mkEnableOption "office applications and productivity tools";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.apps;
description = "Users to install office packages for";
};
};
config = lib.mkIf config.my.apps.office.enable {
environment.variables.CALIBRE_USE_SYSTEM_THEME = "1";
users.users =
let
packages = builtins.attrValues {
inherit (pkgs)
jre17_minimal # for libreoffice extensions
libreoffice # office, but based & european
calibre # ugly af eBook library manager
newsflash # feed reader, syncs with nextcloud
furtherance # I packaged this one tehee track time utility
# planify # let's pretend I will organize my tasks
;
};
in
inputs.self.lib.mkUserPackages lib config.my.apps.office.users packages;
};
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -17,7 +18,14 @@ let
in
{
options = {
my.dev.cc.enable = lib.mkEnableOption "Install C/C++ tooling globally";
my.dev.cc = {
enable = lib.mkEnableOption "Install C/C++ tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install C/C++ packages for";
};
};
devShells.cc = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -31,6 +39,6 @@ in
};
};
config = lib.mkIf config.my.dev.cc.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.cc.users { inherit packages; };
};
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -14,7 +15,14 @@ let
in
{
options = {
my.dev.docker.enable = lib.mkEnableOption "Install Docker tooling globally";
my.dev.docker = {
enable = lib.mkEnableOption "Install Docker tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Docker packages for";
};
};
devShells.docker = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -29,7 +37,7 @@ in
};
config = lib.mkMerge [
(lib.mkIf config.my.dev.docker.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.docker.users { inherit packages; };
})
{
environment.variables.DOCKER_CONFIG = "\${XDG_CONFIG_HOME}/docker";

View File

@@ -6,9 +6,16 @@
...
}:
{
options.my.emacs.enable = lib.mkEnableOption "Doom Emacs configuration";
options.my.emacs = {
enable = lib.mkEnableOption "Doom Emacs configuration";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Emacs packages for";
};
};
config = lib.mkIf config.my.emacs.enable {
home-manager.users.jawz = {
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.emacs.users (_user: {
xdg.dataFile = {
"doom/templates/events.org".source = ../../dotfiles/doom/templates/events.org;
"doom/templates/default.org".source = ../../dotfiles/doom/templates/default.org;
@@ -21,41 +28,46 @@
edit = "emacsclient -t";
e = "edit";
};
};
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs.xorg) xwininfo;
inherit (pkgs)
#emacs everywhere
xdotool
xclip
wl-clipboard-rs
fd # modern find, faster searches
fzf # fuzzy finder! super cool and useful
ripgrep # modern grep
tree-sitter # code parsing based on symbols and shit, I do not get it
graphviz # graphs
tetex # export pdf
languagetool # proofreader for English
# lsps
yaml-language-server
markdownlint-cli
;
inherit (pkgs.nodePackages)
vscode-json-languageserver
prettier # multi-language linter
;
};
});
users.users =
let
packages = builtins.attrValues {
inherit (pkgs.xorg) xwininfo;
inherit (pkgs)
#emacs everywhere
xdotool
xclip
wl-clipboard-rs
fd # modern find, faster searches
fzf # fuzzy finder! super cool and useful
ripgrep # modern grep
tree-sitter # code parsing based on symbols and shit, I do not get it
graphviz # graphs
tetex # export pdf
languagetool # proofreader for English
# lsps
yaml-language-server
markdownlint-cli
;
inherit (pkgs.nodePackages)
vscode-json-languageserver
prettier # multi-language linter
;
};
in
inputs.self.lib.mkUserPackages lib config.my.emacs.users packages;
services.emacs = {
enable = true;
defaultEditor = true;
package = pkgs.emacsWithDoom {
doomDir = ../../dotfiles/doom;
doomLocalDir = "/home/jawz/.local/share/nix-doom";
doomLocalDir = "/home/${inputs.self.lib.getFirstUser config.my.emacs.users}/.local/share/nix-doom";
tangleArgs = "--all config.org";
extraPackages =
epkgs:
let
inherit (config.home-manager.users.jawz.programs.emacs)
inherit
(config.home-manager.users.${inputs.self.lib.getFirstUser config.my.emacs.users}.programs.emacs)
extraPackages
extraConfig
;

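Both the doomLocalDir interpolation and the home-manager lookup above collapse the string-or-list users value into a single name with inputs.self.lib.getFirstUser. A one-liner consistent with those call sites (an inferred sketch, not the repository's definition):

# Pick the first user from a list, or return a lone user string as-is.
getFirstUser = users: if builtins.isList users then builtins.head users else users;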
View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -20,7 +21,14 @@ let
in
{
options = {
my.dev.go.enable = lib.mkEnableOption "Install Go tooling globally";
my.dev.go = {
enable = lib.mkEnableOption "Install Go tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Go packages for";
};
};
devShells.go = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -38,7 +46,7 @@ in
environment.variables = { inherit GOPATH; };
}
(lib.mkIf config.my.dev.go.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.go.users { inherit packages; };
})
];
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -18,7 +19,14 @@ let
in
{
options = {
my.dev.haskell.enable = lib.mkEnableOption "Install Haskell tooling globally";
my.dev.haskell = {
enable = lib.mkEnableOption "Install Haskell tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Haskell packages for";
};
};
devShells.haskell = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -33,7 +41,7 @@ in
};
config = lib.mkMerge [
(lib.mkIf config.my.dev.haskell.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.haskell.users { inherit packages; };
})
{
environment.variables = {

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -12,7 +13,14 @@ let
in
{
options = {
my.dev.javascript.enable = lib.mkEnableOption "Install JavaScript tooling globally";
my.dev.javascript = {
enable = lib.mkEnableOption "Install JavaScript tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install JavaScript packages for";
};
};
devShells.javascript = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -27,16 +35,18 @@ in
};
config = lib.mkMerge [
(lib.mkIf config.my.dev.javascript.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.javascript.users { inherit packages; };
})
{
home-manager.users.jawz.xdg.configFile = {
"npm/npmrc".source = ../../dotfiles/npmrc;
"configstore/update-notifier-npm-check.json".text = builtins.toJSON {
optOut = false;
lastUpdateCheck = 1646662583446;
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.dev.javascript.users (_user: {
xdg.configFile = {
"npm/npmrc".source = ../../dotfiles/npmrc;
"configstore/update-notifier-npm-check.json".text = builtins.toJSON {
optOut = false;
lastUpdateCheck = 1646662583446;
};
};
};
});
environment.variables = {
NPM_CONFIG_USERCONFIG = "\${XDG_CONFIG_HOME}/npm/npmrc";
PNPM_HOME = "\${XDG_DATA_HOME}/pnpm";

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -11,7 +12,14 @@ let
in
{
options = {
my.dev.julia.enable = lib.mkEnableOption "Install Julia globally";
my.dev.julia = {
enable = lib.mkEnableOption "Install Julia globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Julia packages for";
};
};
devShells.julia = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -25,6 +33,6 @@ in
};
};
config = lib.mkIf config.my.dev.julia.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.julia.users { inherit packages; };
};
}

View File

@@ -19,7 +19,14 @@ let
in
{
options = {
my.dev.nix.enable = lib.mkEnableOption "Install Nix tooling globally";
my.dev.nix = {
enable = lib.mkEnableOption "Install Nix tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Nix packages for";
};
};
devShells.nix = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -33,20 +40,20 @@ in
};
};
config = lib.mkIf config.my.dev.nix.enable {
users.users.jawz = { inherit packages; };
home-manager.users.jawz.programs.${shellType}.shellAliases =
inputs.self.lib.mergeAliases inputs.self.lib.commonAliases
{
nixformat = ''
deadnix -e && \
nix run nixpkgs#nixfmt-tree && \
statix fix
'';
nix-push-cache = ''
nix build $NH_FLAKE#nixosConfigurations.${config.networking.hostName}.config.system.build.toplevel \
--print-out-paths --fallback --max-jobs 100 --cores 0 |
nix run nixpkgs#attic-client -- push lan:nixos --stdin
'';
};
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.nix.users { inherit packages; };
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.dev.nix.users (_user: {
programs.${shellType}.shellAliases = inputs.self.lib.mergeAliases inputs.self.lib.commonAliases {
nixformat = ''
deadnix -e && \
nix run nixpkgs#nixfmt-tree && \
statix fix
'';
nix-push-cache = ''
nix build $NH_FLAKE#nixosConfigurations.${config.networking.hostName}.config.system.build.toplevel \
--print-out-paths --fallback --max-jobs 100 --cores 0 |
nix run nixpkgs#attic-client -- push lan:nixos --stdin
'';
};
});
};
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -30,7 +31,14 @@ let
in
{
options = {
my.dev.python.enable = lib.mkEnableOption "Install Python tools globally";
my.dev.python = {
enable = lib.mkEnableOption "Install Python tools globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Python packages for";
};
};
devShells.python = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -46,10 +54,12 @@ in
};
config = lib.mkMerge [
(lib.mkIf config.my.dev.python.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.python.users { inherit packages; };
})
{
home-manager.users.jawz.xdg.configFile."python/pythonrc".source = ../../dotfiles/pythonrc;
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.dev.python.users (_user: {
xdg.configFile."python/pythonrc".source = ../../dotfiles/pythonrc;
});
environment.variables.PYTHONSTARTUP = "\${XDG_CONFIG_HOME}/python/pythonrc";
}
];

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -12,7 +13,14 @@ let
in
{
options = {
my.dev.ruby.enable = lib.mkEnableOption "Install Ruby tooling globally";
my.dev.ruby = {
enable = lib.mkEnableOption "Install Ruby tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Ruby packages for";
};
};
devShells.ruby = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -27,7 +35,7 @@ in
};
config = lib.mkMerge [
(lib.mkIf config.my.dev.ruby.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.ruby.users { inherit packages; };
})
{
environment.variables = {

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -17,7 +18,14 @@ let
in
{
options = {
my.dev.rust.enable = lib.mkEnableOption "Install Rust tooling globally";
my.dev.rust = {
enable = lib.mkEnableOption "Install Rust tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Rust packages for";
};
};
devShells.rust = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -32,7 +40,7 @@ in
};
config = lib.mkMerge [
(lib.mkIf config.my.dev.rust.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.rust.users { inherit packages; };
})
{
environment.variables.CARGO_HOME = "\${XDG_DATA_HOME}/cargo";

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -16,7 +17,14 @@ let
in
{
options = {
my.dev.sh.enable = lib.mkEnableOption "Install shell scripting tools globally";
my.dev.sh = {
enable = lib.mkEnableOption "Install shell scripting tools globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install shell scripting packages for";
};
};
devShells.sh = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -30,6 +38,6 @@ in
};
};
config = lib.mkIf config.my.dev.sh.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.sh.users { inherit packages; };
};
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -14,7 +15,14 @@ let
in
{
options = {
my.dev.zig.enable = lib.mkEnableOption "Install Zig tooling globally";
my.dev.zig = {
enable = lib.mkEnableOption "Install Zig tooling globally";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.dev;
description = "Users to install Zig packages for";
};
};
devShells.zig = lib.mkOption {
type = lib.types.package;
default = pkgs.mkShell {
@@ -28,6 +36,6 @@ in
};
};
config = lib.mkIf config.my.dev.zig.enable {
users.users.jawz = { inherit packages; };
users.users = inputs.self.lib.mkUserAttrs lib config.my.dev.zig.users { inherit packages; };
};
}

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -12,6 +13,11 @@
enable = lib.mkEnableOption "Whether to enable this script";
install = lib.mkEnableOption "Whether to install the script package";
service = lib.mkEnableOption "Whether to enable the script service";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.scripts;
description = "Users to install this script for";
};
name = lib.mkOption {
type = lib.types.str;
description = "Name of the script.";
@@ -36,10 +42,29 @@
description = "Configuration for multiple scripts.";
};
config = lib.mkIf (lib.any (s: s.enable) (lib.attrValues config.my.scripts)) {
users.users.jawz.packages =
config.my.scripts
|> lib.mapAttrsToList (_name: script: lib.optional (script.enable && script.install) script.package)
|> lib.flatten;
users.users =
let
scriptList =
config.my.scripts
|> lib.mapAttrsToList (_name: script: lib.optional (script.enable && script.install) script)
|> lib.flatten;
userMap = lib.foldl' (
acc: script:
let
users = inputs.self.lib.normalizeUsers script.users;
in
lib.foldl' (
acc': user:
acc'
// {
${user} = (acc'.${user} or [ ]) ++ [ script.package ];
}
) acc users
) { } scriptList;
in
lib.mkMerge (
lib.mapAttrsToList (user: packages: inputs.self.lib.mkUserPackages lib user packages) userMap
);
systemd.user.services =
config.my.scripts
|> lib.mapAttrs' (

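The foldl' above inverts the script-to-users mapping into a user-to-packages map before mkMerge hands each user their package list. A worked illustration with hypothetical script names and packages:

# Given (hypothetical values):
#   config.my.scripts = {
#     scan = { enable = true; install = true; users = "jawz"; package = scanPkg; };
#     tidy = { enable = true; install = true; users = [ "jawz" "bearded_dragonn" ]; package = tidyPkg; };
#   };
# the fold yields:
#   userMap = { jawz = [ scanPkg tidyPkg ]; bearded_dragonn = [ tidyPkg ]; };
# and the final mkMerge assigns each user exactly those packages.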
View File

@@ -52,7 +52,12 @@ in
vps = "45.79.25.87";
wg-vps = "10.77.0.1";
wg-server = "10.77.0.2";
wg-g1 = "10.9.0.2";
wg-gs = "10.9.0.0";
wg-friend1 = "10.8.0.2";
wg-friend2 = "10.8.0.3";
wg-friend3 = "10.8.0.4";
wg-friend4 = "10.8.0.5";
wg-friends = "10.8.0.0";
};
description = "Set of IP's for all my computers.";
@@ -103,6 +108,31 @@ in
};
enableContainers = lib.mkEnableOption "container services (Docker/Podman)";
enableProxy = lib.mkEnableOption "nginx reverse proxy for services";
toggleUsers = lib.mkOption {
type = lib.types.attrsOf (lib.types.either lib.types.str (lib.types.listOf lib.types.str));
default = {
apps = "jawz";
dev = "jawz";
shell = "jawz";
scripts = "jawz";
services = "jawz";
stylix = "jawz";
};
description = "Map toggle categories to users. Can be a single user (string) or multiple users (list). Determines which user(s) get packages from each toggle category.";
example = {
apps = "jawz";
dev = "bearded_dragonn";
shell = "jawz";
gaming = [
"jawz"
"bearded_dragonn"
];
stylix = [
"jawz"
"bearded_dragonn"
];
};
};
};
config = {
assertions =

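toggleUsers only supplies the per-category defaults; the actual fan-out happens in self.lib helpers that this change calls everywhere but never defines. Plausible implementations, inferred purely from the call sites (mkUserPackages lib users packages, mkUserAttrs lib users attrs, mkHomeManagerUsers lib users mkCfg, usersOptionType lib) and offered as a sketch rather than the actual library:

rec {
  # The same string-or-list type the toggleUsers option declares per category.
  usersOptionType = lib: lib.types.either lib.types.str (lib.types.listOf lib.types.str);
  # Coerce a single user name or a list of names into a list.
  normalizeUsers = users: if builtins.isList users then users else [ users ];
  # { <user>.packages = packages; } for every listed user.
  mkUserPackages = lib: users: packages:
    lib.genAttrs (normalizeUsers users) (_user: { inherit packages; });
  # Arbitrary per-user attrs, e.g. { inherit packages; } in the dev modules.
  mkUserAttrs = lib: users: attrs: lib.genAttrs (normalizeUsers users) (_user: attrs);
  # Per-user home-manager config built from a function of the user name.
  mkHomeManagerUsers = lib: users: mkCfg: lib.genAttrs (normalizeUsers users) mkCfg;
}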
View File

@@ -7,6 +7,8 @@
let
proxyReverseServices = [
"firefox-syncserver"
"isso"
"plausible"
"readeck"
"microbin"
"ryot"

View File

@@ -6,11 +6,15 @@
}:
let
cfg = config.my.servers.gitea;
id = 969;
gid = id;
uid = id;
in
{
config = lib.mkIf (cfg.enable && config.my.secureHost) {
users.groups.gitea-runner = { };
users.groups.gitea-runner = { inherit gid; };
users.users.gitea-runner = {
inherit uid;
isSystemUser = true;
group = "gitea-runner";
extraGroups = [

View File

@@ -11,8 +11,10 @@
cloudflare-api.sopsFile = ../../secrets/env.yaml;
dns = {
sopsFile = ../../secrets/env.yaml;
owner = config.users.users.jawz.name;
inherit (config.users.users.jawz) group;
owner = config.users.users.${inputs.self.lib.getFirstUser config.my.scripts.update-dns.users}.name;
inherit (config.users.users.${inputs.self.lib.getFirstUser config.my.scripts.update-dns.users})
group
;
};
};
services.cloudflare-dyndns = {

View File

@@ -11,6 +11,11 @@ in
options.my.servers.audiobookshelf = setup.mkOptions "audiobookshelf" "audiobooks" 5687;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
my.servers.audiobookshelf.enableSocket = true;
users.users.audiobookshelf = {
uid = 978;
group = "piracy";
isSystemUser = true;
};
services.audiobookshelf = {
inherit (cfg) enable port;
host = cfg.ip;

View File

@@ -6,11 +6,19 @@
let
setup = import ../factories/mkserver.nix { inherit lib config; };
cfg = config.my.servers.bazarr;
uid = 985;
in
{
options.my.servers.bazarr = setup.mkOptions "bazarr" "subs" config.services.bazarr.listenPort;
config.services.bazarr = lib.mkIf cfg.enable {
inherit (cfg) enable;
group = "piracy";
config = lib.mkIf cfg.enable {
users.users.bazarr = {
inherit uid;
group = "piracy";
isSystemUser = true;
};
services.bazarr = {
inherit (cfg) enable;
group = "piracy";
};
};
}

View File

@@ -15,6 +15,12 @@ in
options.my.servers.gitea = setup.mkOptions "gitea" "git" 9083;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
sops.secrets.gitea.sopsFile = ../../secrets/env.yaml;
users.groups.gitea.gid = 974;
users.users.gitea = {
uid = 975;
isSystemUser = true;
group = "gitea";
};
services.gitea = {
inherit (cfg) enable;
settings = {
@@ -30,6 +36,10 @@ in
FROM = config.my.smtpemail;
SENDMAIL_PATH = "${pkgs.msmtp}/bin/msmtp";
};
service = {
DISABLE_REGISTRATION = true;
ALLOW_ONLY_EXTERNAL_REGISTRATION = true;
};
};
database = {
socket = config.my.postgresSocket;

modules/servers/isso.nix Normal file
View File

@@ -0,0 +1,39 @@
{
lib,
config,
...
}:
let
setup = import ../factories/mkserver.nix { inherit lib config; };
cfg = config.my.servers.isso;
in
{
options.my.servers.isso = setup.mkOptions "isso" "comments" 8180;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
my.servers.isso.domain = "danilo-reyes.com";
sops.secrets.isso = {
sopsFile = ../../secrets/env.yaml;
};
services.isso = {
inherit (cfg) enable;
settings = {
guard.require-author = true;
server.listen = "http://${cfg.ip}:${toString cfg.port}/";
admin = {
enabled = true;
password = "$ISSO_ADMIN_PASSWORD";
};
general = {
host = "https://blog.${cfg.domain}";
max-age = "1h";
gravatar = true;
};
};
};
systemd.services.isso = {
after = [ "network-online.target" ];
wants = [ "network-online.target" ];
serviceConfig.EnvironmentFile = config.sops.secrets.isso.path;
};
};
}

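The isso module above, like the rest of the servers, builds its option set through the mkserver.nix factory (imported with lib and config in scope). Reconstructing mkOptions from the fields the modules actually read (enable, name, port, ip, host, domain, url, local, enableProxy, enableSocket) gives roughly the following; the defaults for ip, domain, and the derived URLs are guesses:

mkOptions = name: subdomain: port: {
  enable = lib.mkEnableOption "${name} server";
  name = lib.mkOption { type = lib.types.str; default = name; };
  port = lib.mkOption { type = lib.types.port; default = port; };
  ip = lib.mkOption { type = lib.types.str; default = config.my.localhost; };
  domain = lib.mkOption { type = lib.types.str; default = "lebubu.org"; };
  # host/url/local are derived, which is why isso can override domain above.
  host = lib.mkOption { type = lib.types.str; default = "${subdomain}.${config.my.servers.${name}.domain}"; };
  url = lib.mkOption { type = lib.types.str; default = "https://${config.my.servers.${name}.host}"; };
  local = lib.mkOption { type = lib.types.str; default = "http://${config.my.servers.${name}.ip}:${toString port}"; };
  enableProxy = lib.mkOption { type = lib.types.bool; default = true; };
  enableSocket = lib.mkEnableOption "listening on a unix socket";
};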
View File

@@ -28,6 +28,11 @@ in
pkgs.jellyfin-ffmpeg
]
++ (lib.optional cfg.enableCron [ sub-sync-path ]);
users.users.jellyfin = {
uid = 984;
group = "piracy";
isSystemUser = true;
};
services = {
jellyfin = {
inherit (cfg) enable;

View File

@@ -6,6 +6,9 @@
let
setup = import ../factories/mkserver.nix { inherit lib config; };
cfg = config.my.servers.kavita;
id = 982;
gid = id;
uid = id;
in
{
options.my.servers.kavita = setup.mkOptions "kavita" "library" config.services.kavita.settings.Port;
@@ -14,7 +17,9 @@ in
owner = config.users.users.kavita.name;
inherit (config.users.users.kavita) group;
};
users.groups.kavita = { inherit gid; };
users.users.kavita = {
inherit uid;
isSystemUser = true;
group = "kavita";
extraGroups = [

View File

@@ -0,0 +1,44 @@
{
lib,
config,
inputs,
...
}:
let
setup = import ../factories/mkserver.nix { inherit lib config; };
cfg = config.my.servers.keycloak;
in
{
options.my.servers.keycloak = setup.mkOptions "keycloak" "auth" 8090;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
sops.secrets.postgres-password.sopsFile = ../../secrets/secrets.yaml;
sops.secrets.keycloak = {
sopsFile = ../../secrets/env.yaml;
restartUnits = [ "keycloak.service" ];
};
services.keycloak = {
inherit (cfg) enable;
database = {
type = "postgresql";
host = "localhost";
createLocally = false;
username = "keycloak";
name = "keycloak";
passwordFile = config.sops.secrets.postgres-password.path;
};
settings = {
hostname = cfg.host;
hostname-strict = true;
hostname-strict-https = false;
http-enabled = true;
http-port = cfg.port;
http-host = cfg.ip;
proxy-headers = "xforwarded";
};
};
systemd.services.keycloak.serviceConfig.EnvironmentFile = config.sops.secrets.keycloak.path;
services.nginx.virtualHosts.${cfg.host} = lib.mkIf (cfg.enableProxy && config.my.enableProxy) (
inputs.self.lib.proxyReverseFix cfg
);
};
}

View File

@@ -17,7 +17,7 @@ in
TZ = config.my.timeZone;
DEFAULT_GROUP = "Home";
BASE_URL = cfg.url;
API_DOCS = "false";
API_DOCS = "true";
ALLOW_SIGNUP = "false";
DB_ENGINE = "postgres";
POSTGRES_URL_OVERRIDE = "postgresql://${cfg.name}:@/${cfg.name}?host=${config.my.postgresSocket}";
@@ -25,6 +25,13 @@ in
WEB_CONCURRENCY = "1";
SMTP_HOST = "smtp.gmail.com";
SMTP_PORT = "587";
OIDC_AUTH_ENABLED = "true";
OIDC_SIGNUP_ENABLED = "true";
OIDC_CLIENT_ID = "mealie";
OIDC_ADMIN_GROUP = "/admins";
OIDC_USER_CLAIM = "email";
OIDC_PROVIDER_NAME = "keycloak";
OIDC_SIGNING_ALGORITHM = "RS256";
};
credentialsFile = config.sops.secrets.mealie.path;
};

View File

@@ -10,7 +10,7 @@ in
{
options.my.servers.metube = setup.mkOptions "metube" "bajameesta" 8881;
config.virtualisation.oci-containers.containers.metube = lib.mkIf cfg.enable {
image = "ghcr.io/alexta69/metube:latest";
image = "ghcr.io/alexta69/metube:2026.01.02";
ports = [ "${toString cfg.port}:8081" ];
volumes = [
"${config.my.containerData}/metube:/downloads"

View File

@@ -32,6 +32,9 @@ let
pytensorflow = pkgs.python3.withPackages (ps: [ ps.tensorflow ]);
cfg = config.my.servers.nextcloud;
cfgC = config.my.servers.collabora;
id = 990;
gid = id;
uid = id;
in
{
options.my.servers = {
@@ -48,8 +51,11 @@ in
"nodejs-14.21.3"
"openssl-1.1.1v"
];
users.groups.nextcloud = { inherit gid; };
users.users.nextcloud = {
inherit uid;
isSystemUser = true;
group = "nextcloud";
extraGroups = [ "render" ];
packages = builtins.attrValues {
inherit exiftool pytensorflow;

View File

@@ -0,0 +1,60 @@
{
lib,
config,
...
}:
let
setup = import ../factories/mkserver.nix { inherit lib config; };
cfg = config.my.servers.oauth2-proxy;
id = 967;
gid = id;
uid = id;
in
{
options.my.servers.oauth2-proxy = setup.mkOptions "oauth2-proxy" "auth-proxy" 4180;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
users.groups.oauth2-proxy = { inherit gid; };
users.users.oauth2-proxy = {
inherit uid;
isSystemUser = true;
group = "oauth2-proxy";
};
sops.secrets.oauth2-proxy = {
sopsFile = ../../secrets/env.yaml;
restartUnits = [ "oauth2-proxy.service" ];
};
sops.secrets.oauth2-proxy-cookie = {
sopsFile = ../../secrets/secrets.yaml;
restartUnits = [ "oauth2-proxy.service" ];
};
services.oauth2-proxy = {
inherit (cfg) enable;
provider = "keycloak-oidc";
clientID = "oauth2-proxy";
keyFile = config.sops.secrets.oauth2-proxy.path;
oidcIssuerUrl = "${config.my.servers.keycloak.url}/realms/homelab";
httpAddress = "${cfg.ip}:${toString cfg.port}";
email.domains = [ "*" ];
cookie = {
name = "_oauth2_proxy";
secure = true;
expire = "168h";
refresh = "1h";
domain = ".lebubu.org";
secret = config.sops.secrets.oauth2-proxy-cookie.path;
};
extraConfig = {
skip-auth-route = [ "^/ping$" ];
set-xauthrequest = true;
pass-access-token = true;
pass-user-headers = true;
request-logging = true;
auth-logging = true;
session-store-type = "cookie";
skip-provider-button = true;
code-challenge-method = "S256";
whitelist-domain = [ ".lebubu.org" ];
};
};
};
}

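With set-xauthrequest and skip-provider-button enabled, this proxy is shaped for nginx's auth_request subrequest flow, presumably what proxyReverseFix wires up for the protected vhosts. An illustrative stand-alone vhost, not taken from this repository (upstream addresses are hypothetical):

services.nginx.virtualHosts."app.lebubu.org".locations = {
  # Expose the oauth2-proxy endpoints (/oauth2/auth, /oauth2/start, ...).
  "/oauth2/".proxyPass = "http://127.0.0.1:4180";
  "/" = {
    proxyPass = "http://127.0.0.1:8080"; # the protected application
    extraConfig = ''
      auth_request /oauth2/auth;
      error_page 401 = /oauth2/start;
    '';
  };
};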
View File

@@ -1,21 +1,37 @@
{ lib, config, ... }:
let
cfg = config.my.servers.paperless;
inherit (config.services.paperless) port;
id = 315;
gid = id;
uid = id;
in
{
options.my.servers.paperless.enable = lib.mkEnableOption "Paperless-ngx document management system";
config = lib.mkIf (cfg.enable && config.my.servers.postgres.enable) {
networking.firewall.allowedTCPPorts = [ config.services.paperless.port ];
networking.firewall.allowedTCPPorts = [ port ];
users.groups.paperless = { inherit gid; };
users.users.paperless = {
inherit uid;
isSystemUser = true;
group = "paperless";
};
services.paperless = {
inherit (cfg) enable;
address = "0.0.0.0";
address = config.my.ips.server;
consumptionDirIsPublic = true;
consumptionDir = "/srv/pool/scans/";
settings = {
PAPERLESS_ACCOUNT_DEFAULT_HTTP_PROTOCOL = "http";
PAPERLESS_URL = "http://${config.my.ips.server}:${builtins.toString port}";
PAPERLESS_DBENGINE = "postgress";
PAPERLESS_DBNAME = "paperless";
PAPERLESS_DBHOST = config.my.postgresSocket;
PAPERLESS_TIME_ZONE = config.my.timeZone;
PAPERLESS_APPS = "allauth.socialaccount.providers.openid_connect";
PAPERLESS_ACCOUNT_ALLOW_SIGNUPS = false;
PAPERLESS_SOCIALACCOUNT_ALLOW_SIGNUPS = true;
PAPERLESS_SOCIAL_AUTO_SIGNUP = true;
PAPERLESS_CONSUMER_IGNORE_PATTERN = builtins.toJSON [
".DS_STORE/*"
"desktop.ini"

View File

@@ -0,0 +1,27 @@
{
lib,
config,
...
}:
let
setup = import ../factories/mkserver.nix { inherit lib config; };
cfg = config.my.servers.plausible;
in
{
options.my.servers.plausible = setup.mkOptions "plausible" "analytics" 8439;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
sops.secrets.plausible.sopsFile = ../../secrets/secrets.yaml;
services.plausible = {
inherit (cfg) enable;
database.postgres.socket = config.my.postgresSocket;
mail.email = config.my.smtpemail;
server = {
inherit (cfg) port;
baseUrl = cfg.url;
listenAddress = cfg.ip;
secretKeybaseFile = config.sops.secrets.plausible.path;
disableRegistration = true;
};
};
};
}

View File

@@ -9,42 +9,49 @@ let
in
{
options.my.servers.plex = setup.mkOptions "plex" "plex" 32400;
config.services = lib.mkIf (cfg.enable && config.my.secureHost) {
plex = {
inherit (cfg) enable;
config = lib.mkIf (cfg.enable && config.my.secureHost) {
users.users.plex = {
uid = 193;
group = "piracy";
isSystemUser = true;
};
nginx = lib.mkIf cfg.enableProxy {
virtualHosts."${cfg.host}" = {
forceSSL = true;
enableACME = true;
http2 = true;
serverAliases = [
"plex.rotehaare.art"
];
extraConfig = ''
# Some players don't reopen a socket and playback stops totally instead of resuming after an extended pause
send_timeout 100m;
# Plex headers
proxy_set_header X-Plex-Client-Identifier $http_x_plex_client_identifier;
proxy_set_header X-Plex-Device $http_x_plex_device;
proxy_set_header X-Plex-Device-Name $http_x_plex_device_name;
proxy_set_header X-Plex-Platform $http_x_plex_platform;
proxy_set_header X-Plex-Platform-Version $http_x_plex_platform_version;
proxy_set_header X-Plex-Product $http_x_plex_product;
proxy_set_header X-Plex-Token $http_x_plex_token;
proxy_set_header X-Plex-Version $http_x_plex_version;
proxy_set_header X-Plex-Nocache $http_x_plex_nocache;
proxy_set_header X-Plex-Provides $http_x_plex_provides;
proxy_set_header X-Plex-Device-Vendor $http_x_plex_device_vendor;
proxy_set_header X-Plex-Model $http_x_plex_model;
# Buffering off send to the client as soon as the data is received from Plex.
proxy_redirect off;
proxy_buffering off;
'';
locations."/" = {
proxyPass = cfg.local;
proxyWebsockets = true;
services = {
plex = {
inherit (cfg) enable;
group = "piracy";
};
nginx = lib.mkIf cfg.enableProxy {
virtualHosts."${cfg.host}" = {
forceSSL = true;
enableACME = true;
http2 = true;
serverAliases = [
"plex.rotehaare.art"
];
extraConfig = ''
# Some players don't reopen a socket and playback stops totally instead of resuming after an extended pause
send_timeout 100m;
# Plex headers
proxy_set_header X-Plex-Client-Identifier $http_x_plex_client_identifier;
proxy_set_header X-Plex-Device $http_x_plex_device;
proxy_set_header X-Plex-Device-Name $http_x_plex_device_name;
proxy_set_header X-Plex-Platform $http_x_plex_platform;
proxy_set_header X-Plex-Platform-Version $http_x_plex_platform_version;
proxy_set_header X-Plex-Product $http_x_plex_product;
proxy_set_header X-Plex-Token $http_x_plex_token;
proxy_set_header X-Plex-Version $http_x_plex_version;
proxy_set_header X-Plex-Nocache $http_x_plex_nocache;
proxy_set_header X-Plex-Provides $http_x_plex_provides;
proxy_set_header X-Plex-Device-Vendor $http_x_plex_device_vendor;
proxy_set_header X-Plex-Model $http_x_plex_model;
# Buffering off send to the client as soon as the data is received from Plex.
proxy_redirect off;
proxy_buffering off;
'';
locations."/" = {
proxyPass = cfg.local;
proxyWebsockets = true;
};
};
};
};

View File

@@ -40,6 +40,8 @@ let
"sonarqube"
"gitea"
"atticd"
"keycloak"
"webref"
];
in
{

View File

@@ -11,6 +11,7 @@ in
options.my.servers.prowlarr = setup.mkOptions "prowlarr" "indexer" 9696;
config = lib.mkIf cfg.enable {
users.users.prowlarr = {
uid = 987;
group = "piracy";
isSystemUser = true;
};

View File

@@ -7,10 +7,6 @@
}:
let
inherit (inputs) qbit_manage;
pkgsU = import inputs.nixpkgs-unstable {
system = "x86_64-linux";
config.allowUnfree = true;
};
vuetorrent = pkgs.fetchzip {
url = "https://github.com/VueTorrent/VueTorrent/releases/download/v2.31.0/vuetorrent.zip";
sha256 = "sha256-kVDnDoCoJlY2Ew71lEMeE67kNOrKTJEMqNj2OfP01qw=";

View File

@@ -10,6 +10,11 @@ in
{
options.my.servers.radarr = setup.mkOptions "radarr" "movies" 7878;
config = lib.mkIf cfg.enable {
users.users.radarr = {
uid = 275;
group = "piracy";
isSystemUser = true;
};
services.radarr = {
inherit (cfg) enable;
group = "piracy";

View File

@@ -12,7 +12,7 @@ in
config = lib.mkIf (cfg.enable && config.my.servers.postgres.enable && config.my.secureHost) {
sops.secrets.ryot.sopsFile = ../../secrets/env.yaml;
virtualisation.oci-containers.containers.ryot = {
image = "ghcr.io/ignisda/ryot:v9.5.0";
image = "ghcr.io/ignisda/ryot:v10";
ports = [ "${toString cfg.port}:8000" ];
environmentFiles = [ config.sops.secrets.ryot.path ];
environment = {

View File

@@ -9,8 +9,15 @@ let
in
{
options.my.servers.sonarr = setup.mkOptions "sonarr" "series" 8989;
config.services.sonarr = lib.mkIf cfg.enable {
inherit (cfg) enable;
group = "piracy";
config = lib.mkIf cfg.enable {
users.users.sonarr = {
uid = 274;
group = "piracy";
isSystemUser = true;
};
services.sonarr = {
inherit (cfg) enable;
group = "piracy";
};
};
}

View File

@@ -37,7 +37,7 @@ in
};
services.stash = {
inherit (cfg) enable;
group = "piracy";
group = "glue";
mutableSettings = true;
username = "Suing8150";
passwordFile = config.sops.secrets."stash/password".path;
@@ -59,12 +59,15 @@ in
LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib:${pkgs.glibc}/lib:${pkgs.zlib}/lib:${pkgs.libffi}/lib:${pkgs.openssl}/lib";
};
serviceConfig = {
PrivateUsers = lib.mkForce false;
BindReadOnlyPaths = lib.mkForce [ ];
BindPaths = lib.mkIf (cfgS.settings != { }) (map (stash: "${stash.path}") cfgS.settings.stash);
};
};
users.users.stash = {
uid = 974;
isSystemUser = true;
group = "glue";
packages = [ stashPythonFHS ];
};
};

View File

@@ -16,6 +16,9 @@ let
add_header Access-Control-Allow-Origin *;
return 200 '${builtins.toJSON data}';
'';
id = 224;
gid = id;
uid = id;
in
{
options.my.servers = {
@@ -27,6 +30,12 @@ in
synapse = { inherit domain; };
element = { inherit domain; };
};
users.groups.matrix-synapse = { inherit gid; };
users.users.matrix-synapse = {
inherit uid;
isSystemUser = true;
group = "matrix-synapse";
};
sops.secrets = {
synapse = {
sopsFile = ../../secrets/env.yaml;
@@ -62,6 +71,7 @@ in
federation_domain_whitelist = [ ];
allow_public_rooms_without_auth = false;
allow_public_rooms_over_federation = false;
registration_shared_secret_path = config.sops.secrets.synapse.path; # the *_path variant makes Synapse read the secret from the file, rather than treating the sops path string as the secret itself
max_upload_size = "4096M";
tls_private_key_path = config.sops.secrets."matrix/key".path;
tls_certificate_path = config.sops.secrets."matrix/cert".path;

View File

@@ -7,11 +7,20 @@
let
cfg = config.my.servers.vaultwarden;
setup = import ../factories/mkserver.nix { inherit lib config; };
id = 981;
gid = id;
uid = id;
in
{
options.my.servers.vaultwarden = setup.mkOptions "vaultwarden" "vault" 8222;
config = lib.mkIf (cfg.enable && config.my.servers.postgres.enable && config.my.secureHost) {
sops.secrets.vaultwarden.sopsFile = ../../secrets/env.yaml;
users.groups.vaultwarden = { inherit gid; };
users.users.vaultwarden = {
inherit uid;
isSystemUser = true;
group = "vaultwarden";
};
services.vaultwarden = {
inherit (cfg) enable;
dbBackend = "postgresql";

View File

@@ -1,5 +1,6 @@
{
config,
inputs,
lib,
pkgs,
...
@@ -11,9 +12,20 @@ let
];
in
{
options.my.services.printing.enable = lib.mkEnableOption "printing services and drivers";
options.my.services.printing = {
enable = lib.mkEnableOption "printing services and drivers";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.services;
description = "Users to install printing packages for";
};
};
config = lib.mkIf config.my.services.printing.enable {
users.users.jawz.packages = [ pkgs.simple-scan ];
users.users =
let
packages = [ pkgs.simple-scan ];
in
inputs.self.lib.mkUserPackages lib config.my.services.printing.users packages;
services.printing = {
enable = true;
drivers = printingDrivers;

View File

@@ -1,7 +1,6 @@
{
config,
lib,
inputs,
...
}:
{

View File

@@ -53,20 +53,27 @@ in
user = "jawz";
password = config.sops.secrets.syncthing_password.path;
};
devices = {
server.id = "BG6PF7S-KATABWO-7WAZFMX-6YO7IS3-WQTMR3M-VSOSV7V-HFFMNNH-BFX2EQ4";
miniserver.id = "HDYEGIR-GFU7ONK-MOOJUFH-N3L3XHX-SXWN3FI-O23K6LD-BJENQK5-VIPV2AT";
workstation.id = "4E4KJ6M-MSTNBVF-D7CNHDW-DUTB3VR-SXKZ4NH-ZKAOMF5-V24JECJ-4STSZAA";
galaxy.id = "UAZ5YDV-YUFBXOY-QMS6S6R-WPIIKZI-4OPPW5L-G4OVUPO-YW5KFYY-YASRAAV";
phone.id = "OSOX2VZ-AO2SA3C-BFB6NKF-K6CR6WX-64TDBKW-RRKEKJ4-FKZE5CV-J2RGJAJ";
wg-friend1 = {
id = "XBIYCD4-EFKS5SK-WFF73CU-P37GXVH-OMWEIA4-6KC5F3L-U5UQWSF-SYNNRQF";
addresses = [ "tcp://${config.my.ips.wg-friend1}:22000" ];
introducer = false;
autoAcceptFolders = false;
paused = false;
devices =
let
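# mkWgDevice: Syncthing device entry for a WireGuard peer, pointed at the peer's WireGuard IP on the default sync port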
mkWgDevice = name: id: {
inherit id;
addresses = [ "tcp://${config.my.ips.${name}}:22000" ];
introducer = false;
autoAcceptFolders = false;
paused = false;
};
in
{
server.id = "BG6PF7S-KATABWO-7WAZFMX-6YO7IS3-WQTMR3M-VSOSV7V-HFFMNNH-BFX2EQ4";
miniserver.id = "HDYEGIR-GFU7ONK-MOOJUFH-N3L3XHX-SXWN3FI-O23K6LD-BJENQK5-VIPV2AT";
workstation.id = "4E4KJ6M-MSTNBVF-D7CNHDW-DUTB3VR-SXKZ4NH-ZKAOMF5-V24JECJ-4STSZAA";
galaxy.id = "UAZ5YDV-YUFBXOY-QMS6S6R-WPIIKZI-4OPPW5L-G4OVUPO-YW5KFYY-YASRAAV";
phone.id = "OSOX2VZ-AO2SA3C-BFB6NKF-K6CR6WX-64TDBKW-RRKEKJ4-FKZE5CV-J2RGJAJ";
wg-friend1 = mkWgDevice "wg-friend1" "XBIYCD4-EFKS5SK-WFF73CU-P37GXVH-OMWEIA4-6KC5F3L-U5UQWSF-SYNNRQF";
wg-friend2 = mkWgDevice "wg-friend2" "XBIYCD4-EFKS5SK-WFF73CU-P37GXVH-OMWEIA4-6KC5F3L-U5UQWSF-SYNNRQF";
wg-friend3 = mkWgDevice "wg-friend3" "XBIYCD4-EFKS5SK-WFF73CU-P37GXVH-OMWEIA4-6KC5F3L-U5UQWSF-SYNNRQF";
wg-friend4 = mkWgDevice "wg-friend4" "7YPUQ4Y-2UVEAXI-KBQVU7R-B6R5O36-GDQPTOY-3R3OG7H-BVWVOTD-EX52VQM";
};
};
folders = {
cache = mkMobile "~/Downloads/cache/";
friends = mkMobile "~/Pictures/Artist/friends/";
@@ -108,6 +115,19 @@ in
path = "~/Pictures/Encrypted/friends";
ignorePerms = false;
type = "sendreceive";
devices = [
"server"
"workstation"
"wg-friend1"
"wg-friend2"
"wg-friend3"
"wg-friend4"
];
};
family_share = {
path = "~/Pictures/Encrypted/family";
ignorePerms = false;
type = "sendreceive";
devices = [
"server"
"workstation"

View File

@@ -1,20 +1,32 @@
{
config,
inputs,
lib,
pkgs,
...
}:
{
options.my.shell.type = lib.mkOption {
type = lib.types.enum [
"bash"
"zsh"
];
default = "bash";
description = "The shell to use system-wide (bash or zsh)";
options.my.shell = {
type = lib.mkOption {
type = lib.types.enum [
"bash"
"zsh"
];
default = "bash";
description = "The shell to use system-wide (bash or zsh)";
};
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.shell;
description = "Users to configure shell for";
};
};
config = {
users.users.jawz.shell = pkgs.${config.my.shell.type};
users.users = lib.mkMerge (
map (user: {
${user}.shell = pkgs.${config.my.shell.type};
}) (inputs.self.lib.normalizeUsers config.my.shell.users)
);
programs.zsh.enable = config.my.shell.type == "zsh";
};
}
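For reference, with a hypothetical two-user list [ "alice" "bob" ] and my.shell.type = "zsh", the users.users expression above reduces to the following before the module system merges it (user names invented for illustration):

users.users = lib.mkMerge [
  { alice.shell = pkgs.zsh; }
  { bob.shell = pkgs.zsh; }
];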

View File

@@ -1,17 +1,29 @@
{
config,
inputs,
lib,
pkgs,
...
}:
{
options.my.shell.exercism.enable = lib.mkEnableOption "Exercism coding practice platform";
config = lib.mkIf config.my.shell.exercism.enable {
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
exercism # learn to code
bats # testing system, required by Exercism
;
options.my.shell.exercism = {
enable = lib.mkEnableOption "Exercism coding practice platform";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.shell;
description = "Users to install Exercism for";
};
};
config = lib.mkIf config.my.shell.exercism.enable {
users.users =
let
packages = builtins.attrValues {
inherit (pkgs)
exercism # learn to code
bats # testing system, required by Exercism
;
};
in
inputs.self.lib.mkUserPackages lib config.my.shell.exercism.users packages;
};
}

View File

@@ -6,43 +6,62 @@
...
}:
{
options.my.shell.multimedia.enable = lib.mkEnableOption "multimedia CLI tools and codecs";
config = lib.mkIf config.my.shell.multimedia.enable {
sops.secrets."gallery-dl/secrets" = {
sopsFile = ../../secrets/gallery.yaml;
owner = "jawz";
mode = "0400";
};
home-manager.users.jawz.programs = {
yt-dlp = {
enable = true;
settings = {
embed-thumbnail = true;
embed-subs = true;
sub-langs = "all";
cookies-from-browser = "firefox+gnomekeyring:/home/jawz/.librewolf/jawz";
};
};
gallery-dl = {
enable = true;
settings = inputs.self.lib.importDotfile ../../dotfiles/gallery-dl.nix;
};
${config.my.shell.type} = {
initExtra = lib.mkAfter ''
if [ -r "${config.sops.secrets."gallery-dl/secrets".path}" ]; then
set -a # automatically export all variables
source "${config.sops.secrets."gallery-dl/secrets".path}"
set +a # stop automatically exporting
fi
'';
};
};
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
ffmpeg # not ffmpreg, the coolest video conversion tool!
imagemagick # photoshop what??
ffpb # make ffmpeg encoding... a bit fun
;
options.my.shell.multimedia = {
enable = lib.mkEnableOption "multimedia CLI tools and codecs";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.shell;
description = "Users to install multimedia shell tools for";
};
};
config = lib.mkIf config.my.shell.multimedia.enable {
sops.secrets."gallery-dl/secrets" =
let
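# mode 0400 below means only the owner can read the file, so only the first listed user gets the gallery-dl secrets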
user = inputs.self.lib.getFirstUser config.my.shell.multimedia.users;
in
{
sopsFile = ../../secrets/gallery.yaml;
owner = user;
mode = "0400";
};
home-manager.users =
inputs.self.lib.mkHomeManagerUsers lib config.my.shell.multimedia.users
(user: {
programs = {
yt-dlp = {
enable = true;
settings = {
embed-thumbnail = true;
embed-subs = true;
sub-langs = "all";
cookies-from-browser = "firefox+gnomekeyring:/home/${user}/.librewolf/${user}";
};
};
gallery-dl = {
enable = true;
settings = inputs.self.lib.importDotfile ../../dotfiles/gallery-dl.nix;
};
${config.my.shell.type} = {
initExtra = lib.mkAfter ''
if [ -r "${config.sops.secrets."gallery-dl/secrets".path}" ]; then
set -a # automatically export all variables
source "${config.sops.secrets."gallery-dl/secrets".path}"
set +a # stop automatically exporting
fi
'';
};
};
});
users.users =
let
packages = builtins.attrValues {
inherit (pkgs)
ffmpeg # not ffmpreg, the coolest video conversion tool!
imagemagick # photoshop what??
ffpb # make ffmpeg encoding... a bit fun
;
};
in
inputs.self.lib.mkUserPackages lib config.my.shell.multimedia.users packages;
};
}

View File

@@ -9,82 +9,91 @@ let
shellType = config.my.shell.type;
in
{
options.my.shell.tools.enable = lib.mkEnableOption "shell tools and utilities";
config = lib.mkIf config.my.shell.tools.enable {
home-manager.users.jawz.programs = {
hstr.enable = true;
htop = {
enable = true;
package = pkgs.htop-vim;
};
eza = {
enable = true;
git = true;
icons = "auto";
};
zoxide = {
enable = true;
enableBashIntegration = shellType == "bash";
enableZshIntegration = shellType == "zsh";
};
bat = {
enable = true;
config.pager = "less -FR";
extraPackages = builtins.attrValues {
inherit (pkgs.bat-extras)
batman # man pages
batpipe # piping
batgrep # ripgrep
batdiff # this is getting crazy!
batwatch # probably my next best friend
prettybat # trans your sourcecode!
;
};
};
password-store = {
enable = false;
package = pkgs.gopass;
settings = {
PASSWORD_STORE_AUTOCLIP = "true";
PASSWORD_STORE_AUTOIMPORT = "false";
PASSWORD_STORE_CLIPTIMEOUT = "45";
PASSWORD_STORE_EXPORTKEYS = "false";
PASSWORD_STORE_NOPAGER = "false";
PASSWORD_STORE_NOTIFICATIONS = "false";
PASSWORD_STORE_PARSING = "true";
PASSWORD_STORE_PATH = "/home/jawz/.local/share/pass";
PASSWORD_STORE_SAFECONTENT = "true";
};
};
${shellType} = {
shellAliases = inputs.self.lib.mergeAliases inputs.self.lib.commonAliases {
cd = "z";
hh = "hstr";
ls = "eza --icons --group-directories-first";
rm = "trash";
b = "bat";
f = "fzf --multi --exact -i";
unique-extensions = ''
fd -tf | rev | cut -d. -f1 | rev |
tr '[:upper:]' '[:lower:]' | sort |
uniq --count | sort -rn'';
};
}
//
inputs.self.lib.shellConditional shellType
''
if command -v fzf-share >/dev/null; then
source "$(fzf-share)/key-bindings.bash"
source "$(fzf-share)/completion.bash"
fi
''
''
if command -v fzf-share >/dev/null; then
source "$(fzf-share)/key-bindings.bash"
source "$(fzf-share)/completion.bash"
fi
'';
options.my.shell.tools = {
enable = lib.mkEnableOption "shell tools and utilities";
users = lib.mkOption {
type = inputs.self.lib.usersOptionType lib;
default = config.my.toggleUsers.shell;
description = "Users to install shell tools for";
};
};
config = lib.mkIf config.my.shell.tools.enable {
home-manager.users = inputs.self.lib.mkHomeManagerUsers lib config.my.shell.tools.users (user: {
programs = {
hstr.enable = true;
htop = {
enable = true;
package = pkgs.htop-vim;
};
eza = {
enable = true;
git = true;
icons = "auto";
};
zoxide = {
enable = true;
enableBashIntegration = shellType == "bash";
enableZshIntegration = shellType == "zsh";
};
bat = {
enable = true;
config.pager = "less -FR";
extraPackages = builtins.attrValues {
inherit (pkgs.bat-extras)
batman # man pages
batpipe # piping
batgrep # ripgrep
batdiff # this is getting crazy!
batwatch # probably my next best friend
prettybat # trans your sourcecode!
;
};
};
password-store = {
enable = false;
package = pkgs.gopass;
settings = {
PASSWORD_STORE_AUTOCLIP = "true";
PASSWORD_STORE_AUTOIMPORT = "false";
PASSWORD_STORE_CLIPTIMEOUT = "45";
PASSWORD_STORE_EXPORTKEYS = "false";
PASSWORD_STORE_NOPAGER = "false";
PASSWORD_STORE_NOTIFICATIONS = "false";
PASSWORD_STORE_PARSING = "true";
PASSWORD_STORE_PATH = "/home/${user}/.local/share/pass";
PASSWORD_STORE_SAFECONTENT = "true";
};
};
${shellType} = {
shellAliases = inputs.self.lib.mergeAliases inputs.self.lib.commonAliases {
cd = "z";
hh = "hstr";
ls = "eza --icons --group-directories-first";
rm = "trash";
b = "bat";
f = "fzf --multi --exact -i";
unique-extensions = ''
fd -tf | rev | cut -d. -f1 | rev |
tr '[:upper:]' '[:lower:]' | sort |
uniq --count | sort -rn'';
};
}
//
inputs.self.lib.shellConditional shellType
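# NOTE: both branches below source the bash bindings; fzf-share also ships key-bindings.zsh and completion.zsh, which the zsh branch presumably wants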
''
if command -v fzf-share >/dev/null; then
source "$(fzf-share)/key-bindings.bash"
source "$(fzf-share)/completion.bash"
fi
''
''
if command -v fzf-share >/dev/null; then
source "$(fzf-share)/key-bindings.bash"
source "$(fzf-share)/completion.bash"
fi
'';
};
});
programs = {
starship.enable = true;
tmux.enable = true;
@@ -94,21 +103,25 @@ in
vimAlias = true;
};
};
users.users.jawz.packages = builtins.attrValues {
inherit (pkgs)
ripgrep # modern grep
dust # rusty du similar to gdu
fd # modern find, faster searches
fzf # fuzzy finder! super cool and useful
gdu # disk-space utility checker, somewhat useful
tealdeer # simplified man pages (tldr)
trash-cli # oop! did not mean to delete that
jq # json parser
yq # yaml parser
smartmontools # check hard drive health
rmlint # amazing dupe finder that integrates well with BTRFS
;
};
users.users =
let
packages = builtins.attrValues {
inherit (pkgs)
ripgrep # modern grep
dust # rusty du similar to gdu
fd # modern find, faster searches
fzf # fuzzy finder! super cool and useful
gdu # disk-space utility checker, somewhat useful
tealdeer # simplified man pages (tldr)
trash-cli # oop! did not mean to delete that
jq # json parser
yq # yaml parser
smartmontools # check hard drive health
rmlint # amazing dupe finder that integrates well with BTRFS
;
};
in
inputs.self.lib.mkUserPackages lib config.my.shell.tools.users packages;
environment.variables = {
HISTFILE = "\${XDG_STATE_HOME}/bash/history";
LESSHISTFILE = "-";

View File

@@ -32,7 +32,8 @@
groups.nixremote.gid = config.my.users.nixremote.gid;
users.nixremote = {
inherit (config.my.users.nixremote) home;
isNormalUser = true;
uid = 979;
isSystemUser = true;
createHome = true;
group = "nixremote";
openssh.authorizedKeys.keyFiles = config.my.users.nixremote.authorizedKeys;

View File

@@ -175,6 +175,13 @@ in
inherit name;
value.enable = true;
};
mkEnabledWithUsers = name: {
inherit name;
value = {
enable = true;
users = "jawz";
};
};
mkEnabledWithProxy = name: {
inherit name;
value = {
@@ -213,6 +220,44 @@ in
windows_vm = ../secrets/ssh/ed25519_windows_vm.pub;
};
getSshKeys = keyNames: keyNames |> map (name: inputs.self.lib.sshKeys.${name});
# Helper functions for multi-user toggle support
normalizeUsers = users: if builtins.isString users then [ users ] else users;
mkUserPackages =
lib: users: packages:
lib.mkMerge (
map (user: {
${user}.packages = packages;
}) (inputs.self.lib.normalizeUsers users)
);
mkUserAttrs =
lib: users: attrs:
lib.mkMerge (
map (user: {
${user} = attrs;
}) (inputs.self.lib.normalizeUsers users)
);
mkHomeManagerUsers =
lib: users: fn:
lib.mkMerge (
map (user: {
${user} = fn user;
}) (inputs.self.lib.normalizeUsers users)
);
getFirstUser = users: if builtins.isString users then users else (builtins.head users);
usersOptionType =
lib:
lib.mkOptionType {
name = "usersOption";
description = "Either a single user (string) or multiple users (list of strings)";
check = x: builtins.isString x || (builtins.isList x && lib.all builtins.isString x);
merge =
_loc: defs:
let
normalize = users: if builtins.isString users then [ users ] else users;
allUsers = lib.foldl' (acc: def: acc ++ (normalize def.value)) [ ] defs;
in
lib.unique allUsers;
};
};
};
}
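Pulling the helpers together, a minimal consumer module might look like the sketch below; everything except the helper names (usersOptionType, mkUserPackages, normalizeUsers) is invented for illustration:

{ config, lib, pkgs, inputs, ... }:
{
  options.my.shell.example = {
    enable = lib.mkEnableOption "example tool";
    users = lib.mkOption {
      type = inputs.self.lib.usersOptionType lib;
      default = "jawz"; # a single string is fine...
    };
  };
  config = lib.mkIf config.my.shell.example.enable {
    # ...because every helper normalizes it to a list internally
    users.users = inputs.self.lib.mkUserPackages lib config.my.shell.example.users [ pkgs.hello ];
  };
}

Note the merge function of usersOptionType: definitions from several modules are concatenated and deduplicated, so users = "jawz" in one module and users = [ "jawz" "other" ] in another merge to [ "jawz" "other" ].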

Some files were not shown because too many files have changed in this diff.