From 011204188d5a28083ed654ab941a708ec66101cb Mon Sep 17 00:00:00 2001 From: Danilo Reyes Date: Sat, 1 Nov 2025 23:55:07 -0600 Subject: [PATCH] Update project configuration and documentation for Reference Board Viewer. Add .direnv support for environment management, enhance README with quick start instructions, and update flake.nix with additional dependencies including pydantic-settings and bcrypt. Introduce quick-start.sh and test-auth.sh scripts for streamlined setup and authentication testing. Remove obsolete planning and task documents to clean up the repository. --- .direnv/nix-profile-25.05-l6dvcwx15645vi6d | 1 - .direnv/nix-profile-25.05-l6dvcwx15645vi6d.rc | 2088 ----------------- .gitignore | 1 + README.md | 27 +- backend/app/api/auth.py | 6 +- flake.nix | 4 + frontend/src/routes/register/+page.svelte | 2 +- scripts/quick-start.sh | 144 ++ scripts/test-auth.sh | 145 ++ .../PLANNING-COMPLETE.md | 391 --- .../TASKS-GENERATED.md | 283 --- .../VERIFICATION-COMPLETE.md | 331 --- specs/001-reference-board-viewer/tasks.md | 10 +- 13 files changed, 329 insertions(+), 3104 deletions(-) delete mode 120000 .direnv/nix-profile-25.05-l6dvcwx15645vi6d delete mode 100644 .direnv/nix-profile-25.05-l6dvcwx15645vi6d.rc create mode 100755 scripts/quick-start.sh create mode 100755 scripts/test-auth.sh delete mode 100644 specs/001-reference-board-viewer/PLANNING-COMPLETE.md delete mode 100644 specs/001-reference-board-viewer/TASKS-GENERATED.md delete mode 100644 specs/001-reference-board-viewer/VERIFICATION-COMPLETE.md diff --git a/.direnv/nix-profile-25.05-l6dvcwx15645vi6d b/.direnv/nix-profile-25.05-l6dvcwx15645vi6d deleted file mode 120000 index 42f8a77..0000000 --- a/.direnv/nix-profile-25.05-l6dvcwx15645vi6d +++ /dev/null @@ -1 +0,0 @@ -/nix/store/fw0ymh1b25q3x97wskwkl0n67d73irj1-nix-shell-env \ No newline at end of file diff --git a/.direnv/nix-profile-25.05-l6dvcwx15645vi6d.rc b/.direnv/nix-profile-25.05-l6dvcwx15645vi6d.rc deleted file mode 100644 index 
3dbe460..0000000 --- a/.direnv/nix-profile-25.05-l6dvcwx15645vi6d.rc +++ /dev/null @@ -1,2088 +0,0 @@ -unset shellHook -PATH=${PATH:-} -nix_saved_PATH="$PATH" -XDG_DATA_DIRS=${XDG_DATA_DIRS:-} -nix_saved_XDG_DATA_DIRS="$XDG_DATA_DIRS" -AR='ar' -export AR -AS='as' -export AS -BASH='/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin/bash' -CC='gcc' -export CC -CONFIG_SHELL='/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin/bash' -export CONFIG_SHELL -CXX='g++' -export CXX -HOSTTYPE='x86_64' -HOST_PATH='/nix/store/rry6qingvsrqmc7ll7jgaqpybcbdgf5v-coreutils-9.7/bin:/nix/store/392hs9nhm6wfw4imjllbvb1wil1n39qx-findutils-4.10.0/bin:/nix/store/xw0mf3shymq3k7zlncf09rm8917sdi4h-diffutils-3.12/bin:/nix/store/4rpiqv9yr2pw5094v4wc33ijkqjpm9sa-gnused-4.9/bin:/nix/store/l2wvwyg680h0v2la18hz3yiznxy2naqw-gnugrep-3.11/bin:/nix/store/c1z5j28ndxljf1ihqzag57bwpfpzms0g-gawk-5.3.2/bin:/nix/store/w60s4xh1pjg6dwbw7j0b4xzlpp88q5qg-gnutar-1.35/bin:/nix/store/xd9m9jkvrs8pbxvmkzkwviql33rd090j-gzip-1.14/bin:/nix/store/w1pxx760yidi7n9vbi5bhpii9xxl5vdj-bzip2-1.0.8-bin/bin:/nix/store/xk0d14zpm0njxzdm182dd722aqhav2cc-gnumake-4.4.1/bin:/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin:/nix/store/gj54zvf7vxll1mzzmqhqi1p4jiws3mfb-patch-2.7.6/bin:/nix/store/22rpb6790f346c55iqi6s9drr5qgmyjf-xz-5.8.1-bin/bin:/nix/store/xlmpcglsq8l09qh03rf0virz0331pjdc-file-5.45/bin' -export HOST_PATH -IFS=' -' -IN_NIX_SHELL='impure' -export IN_NIX_SHELL -LD='ld' -export LD -LINENO='76' -MACHTYPE='x86_64-pc-linux-gnu' -NIX_BINTOOLS='/nix/store/87zpmcmwvn48z4lbrfba74b312h22s6c-binutils-wrapper-2.44' -export NIX_BINTOOLS -NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu='1' -export NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu -NIX_BUILD_CORES='8' -export NIX_BUILD_CORES -NIX_CC='/nix/store/kaj8d1zcn149m40s9h0xi0khakibiphz-gcc-wrapper-14.3.0' -export NIX_CC -NIX_CC_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu='1' -export NIX_CC_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu 
-NIX_CFLAGS_COMPILE=' -frandom-seed=fw0ymh1b25 -isystem /nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env/include -isystem /nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env/include' -export NIX_CFLAGS_COMPILE -NIX_ENFORCE_NO_NATIVE='1' -export NIX_ENFORCE_NO_NATIVE -NIX_HARDENING_ENABLE='bindnow format fortify fortify3 pic relro stackclashprotection stackprotector strictoverflow zerocallusedregs' -export NIX_HARDENING_ENABLE -NIX_LDFLAGS='-rpath /home/jawz/Development/Projects/personal/webref/outputs/out/lib -L/nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env/lib -L/nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env/lib' -export NIX_LDFLAGS -NIX_NO_SELF_RPATH='1' -NIX_STORE='/nix/store' -export NIX_STORE -NM='nm' -export NM -OBJCOPY='objcopy' -export OBJCOPY -OBJDUMP='objdump' -export OBJDUMP -OLDPWD='' -export OLDPWD -OPTERR='1' -OSTYPE='linux-gnu' -PATH='/nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env/bin:/nix/store/lvs5gxfk9bw3c3bw0h9phvl42xvxgg0d-uv-0.7.22/bin:/nix/store/g7i75czfbw9sy5f8v7rjbama6lr3ya3s-patchelf-0.15.0/bin:/nix/store/kaj8d1zcn149m40s9h0xi0khakibiphz-gcc-wrapper-14.3.0/bin:/nix/store/8adzgnxs3s0pbj22qhk9zjxi1fqmz3xv-gcc-14.3.0/bin:/nix/store/p2ixvjsas4qw58dcwk01d22skwq4fyka-glibc-2.40-66-bin/bin:/nix/store/rry6qingvsrqmc7ll7jgaqpybcbdgf5v-coreutils-9.7/bin:/nix/store/87zpmcmwvn48z4lbrfba74b312h22s6c-binutils-wrapper-2.44/bin:/nix/store/ap35np2bkwaba3rxs3qlxpma57n2awyb-binutils-2.44/bin:/nix/store/rry6qingvsrqmc7ll7jgaqpybcbdgf5v-coreutils-9.7/bin:/nix/store/392hs9nhm6wfw4imjllbvb1wil1n39qx-findutils-4.10.0/bin:/nix/store/xw0mf3shymq3k7zlncf09rm8917sdi4h-diffutils-3.12/bin:/nix/store/4rpiqv9yr2pw5094v4wc33ijkqjpm9sa-gnused-4.9/bin:/nix/store/l2wvwyg680h0v2la18hz3yiznxy2naqw-gnugrep-3.11/bin:/nix/store/c1z5j28ndxljf1ihqzag57bwpfpzms0g-gawk-5.3.2/bin:/nix/store/w60s4xh1pjg6dwbw7j0b4xzlpp88q5qg-gnutar-1.35/bin:/nix/store/xd9m9jkvrs8pbxvmkzkwviql33rd090j-gzip-1.14/bin:/nix/store/w1p
xx760yidi7n9vbi5bhpii9xxl5vdj-bzip2-1.0.8-bin/bin:/nix/store/xk0d14zpm0njxzdm182dd722aqhav2cc-gnumake-4.4.1/bin:/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin:/nix/store/gj54zvf7vxll1mzzmqhqi1p4jiws3mfb-patch-2.7.6/bin:/nix/store/22rpb6790f346c55iqi6s9drr5qgmyjf-xz-5.8.1-bin/bin:/nix/store/xlmpcglsq8l09qh03rf0virz0331pjdc-file-5.45/bin' -export PATH -PS4='+ ' -RANLIB='ranlib' -export RANLIB -READELF='readelf' -export READELF -SHELL='/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin/bash' -export SHELL -SIZE='size' -export SIZE -SOURCE_DATE_EPOCH='315532800' -export SOURCE_DATE_EPOCH -STRINGS='strings' -export STRINGS -STRIP='strip' -export STRIP -XDG_DATA_DIRS='/nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env/share:/nix/store/lvs5gxfk9bw3c3bw0h9phvl42xvxgg0d-uv-0.7.22/share:/nix/store/g7i75czfbw9sy5f8v7rjbama6lr3ya3s-patchelf-0.15.0/share' -export XDG_DATA_DIRS -__structuredAttrs='' -export __structuredAttrs -_substituteStream_has_warned_replace_deprecation='false' -buildInputs='' -export buildInputs -buildPhase='{ echo "------------------------------------------------------------"; - echo " WARNING: the existence of this path is not guaranteed."; - echo " It is an internal implementation detail for pkgs.mkShell."; - echo "------------------------------------------------------------"; - echo; - # Record all build inputs as runtime dependencies - export; -} >> "$out" -' -export buildPhase -builder='/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin/bash' -export builder -cmakeFlags='' -export cmakeFlags -configureFlags='' -export configureFlags -defaultBuildInputs='' -defaultNativeBuildInputs='/nix/store/g7i75czfbw9sy5f8v7rjbama6lr3ya3s-patchelf-0.15.0 /nix/store/gi6g289i9ydm3z896x67q210y0qq29zg-update-autotools-gnu-config-scripts-hook /nix/store/jjhw2phnaip4kg0qjas3x3fsaifi8y0w-no-broken-symlinks.sh /nix/store/h9lc1dpi14z7is86ffhl3ld569138595-audit-tmpdir.sh 
/nix/store/m54bmrhj6fqz8nds5zcj97w9s9bckc9v-compress-man-pages.sh /nix/store/wgrbkkaldkrlrni33ccvm3b6vbxzb656-make-symlinks-relative.sh /nix/store/5yzw0vhkyszf2d179m0qfkgxmp5wjjx4-move-docs.sh /nix/store/fyaryjvghbkpfnsyw97hb3lyb37s1pd6-move-lib64.sh /nix/store/kd4xwxjpjxi71jkm6ka0np72if9rm3y0-move-sbin.sh /nix/store/pag6l61paj1dc9sv15l7bm5c17xn5kyk-move-systemd-user-units.sh /nix/store/cmzya9irvxzlkh7lfy6i82gbp0saxqj3-multiple-outputs.sh /nix/store/hxv896faph0rqxjq2ycxpcrbnngc95sz-patch-shebangs.sh /nix/store/cickvswrvann041nqxb0rxilc46svw1n-prune-libtool-files.sh /nix/store/xyff06pkhki3qy1ls77w10s0v79c9il0-reproducible-builds.sh /nix/store/z7k98578dfzi6l3hsvbivzm7hfqlk0zc-set-source-date-epoch-to-latest.sh /nix/store/pilsssjjdxvdphlg2h19p0bfx5q0jzkn-strip.sh /nix/store/kaj8d1zcn149m40s9h0xi0khakibiphz-gcc-wrapper-14.3.0' -depsBuildBuild='' -export depsBuildBuild -depsBuildBuildPropagated='' -export depsBuildBuildPropagated -depsBuildTarget='' -export depsBuildTarget -depsBuildTargetPropagated='' -export depsBuildTargetPropagated -depsHostHost='' -export depsHostHost -depsHostHostPropagated='' -export depsHostHostPropagated -depsTargetTarget='' -export depsTargetTarget -depsTargetTargetPropagated='' -export depsTargetTargetPropagated -doCheck='' -export doCheck -doInstallCheck='' -export doInstallCheck -dontAddDisableDepTrack='1' -export dontAddDisableDepTrack -declare -a envBuildBuildHooks=() -declare -a envBuildHostHooks=() -declare -a envBuildTargetHooks=() -declare -a envHostHostHooks=('ccWrapper_addCVars' 'bintoolsWrapper_addLDVars' ) -declare -a envHostTargetHooks=('ccWrapper_addCVars' 'bintoolsWrapper_addLDVars' ) -declare -a envTargetTargetHooks=() -declare -a fixupOutputHooks=('if [ -z "${dontPatchELF-}" ]; then patchELF "$prefix"; fi' 'if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi' 'if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi' '_moveLib64' '_moveSbin' '_moveSystemdUserUnits' 'patchShebangsAuto' 
'_pruneLibtoolFiles' '_doStrip' ) -initialPath='/nix/store/rry6qingvsrqmc7ll7jgaqpybcbdgf5v-coreutils-9.7 /nix/store/392hs9nhm6wfw4imjllbvb1wil1n39qx-findutils-4.10.0 /nix/store/xw0mf3shymq3k7zlncf09rm8917sdi4h-diffutils-3.12 /nix/store/4rpiqv9yr2pw5094v4wc33ijkqjpm9sa-gnused-4.9 /nix/store/l2wvwyg680h0v2la18hz3yiznxy2naqw-gnugrep-3.11 /nix/store/c1z5j28ndxljf1ihqzag57bwpfpzms0g-gawk-5.3.2 /nix/store/w60s4xh1pjg6dwbw7j0b4xzlpp88q5qg-gnutar-1.35 /nix/store/xd9m9jkvrs8pbxvmkzkwviql33rd090j-gzip-1.14 /nix/store/w1pxx760yidi7n9vbi5bhpii9xxl5vdj-bzip2-1.0.8-bin /nix/store/xk0d14zpm0njxzdm182dd722aqhav2cc-gnumake-4.4.1 /nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37 /nix/store/gj54zvf7vxll1mzzmqhqi1p4jiws3mfb-patch-2.7.6 /nix/store/22rpb6790f346c55iqi6s9drr5qgmyjf-xz-5.8.1-bin /nix/store/xlmpcglsq8l09qh03rf0virz0331pjdc-file-5.45' -mesonFlags='' -export mesonFlags -name='nix-shell-env' -export name -nativeBuildInputs='/nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env /nix/store/lvs5gxfk9bw3c3bw0h9phvl42xvxgg0d-uv-0.7.22' -export nativeBuildInputs -out='/home/jawz/Development/Projects/personal/webref/outputs/out' -export out -outputBin='out' -outputDev='out' -outputDevdoc='REMOVE' -outputDevman='out' -outputDoc='out' -outputInclude='out' -outputInfo='out' -outputLib='out' -outputMan='out' -outputs='out' -export outputs -patches='' -export patches -phases='buildPhase' -export phases -pkg='/nix/store/kaj8d1zcn149m40s9h0xi0khakibiphz-gcc-wrapper-14.3.0' -declare -a pkgsBuildBuild=() -declare -a pkgsBuildHost=('/nix/store/7rrf961gz3wbpcqqwg3s3akvwvqscwk9-python3-3.12.11-env' '/nix/store/lvs5gxfk9bw3c3bw0h9phvl42xvxgg0d-uv-0.7.22' '/nix/store/g7i75czfbw9sy5f8v7rjbama6lr3ya3s-patchelf-0.15.0' '/nix/store/gi6g289i9ydm3z896x67q210y0qq29zg-update-autotools-gnu-config-scripts-hook' '/nix/store/jjhw2phnaip4kg0qjas3x3fsaifi8y0w-no-broken-symlinks.sh' '/nix/store/h9lc1dpi14z7is86ffhl3ld569138595-audit-tmpdir.sh' 
'/nix/store/m54bmrhj6fqz8nds5zcj97w9s9bckc9v-compress-man-pages.sh' '/nix/store/wgrbkkaldkrlrni33ccvm3b6vbxzb656-make-symlinks-relative.sh' '/nix/store/5yzw0vhkyszf2d179m0qfkgxmp5wjjx4-move-docs.sh' '/nix/store/fyaryjvghbkpfnsyw97hb3lyb37s1pd6-move-lib64.sh' '/nix/store/kd4xwxjpjxi71jkm6ka0np72if9rm3y0-move-sbin.sh' '/nix/store/pag6l61paj1dc9sv15l7bm5c17xn5kyk-move-systemd-user-units.sh' '/nix/store/cmzya9irvxzlkh7lfy6i82gbp0saxqj3-multiple-outputs.sh' '/nix/store/hxv896faph0rqxjq2ycxpcrbnngc95sz-patch-shebangs.sh' '/nix/store/cickvswrvann041nqxb0rxilc46svw1n-prune-libtool-files.sh' '/nix/store/xyff06pkhki3qy1ls77w10s0v79c9il0-reproducible-builds.sh' '/nix/store/z7k98578dfzi6l3hsvbivzm7hfqlk0zc-set-source-date-epoch-to-latest.sh' '/nix/store/pilsssjjdxvdphlg2h19p0bfx5q0jzkn-strip.sh' '/nix/store/kaj8d1zcn149m40s9h0xi0khakibiphz-gcc-wrapper-14.3.0' '/nix/store/87zpmcmwvn48z4lbrfba74b312h22s6c-binutils-wrapper-2.44' ) -declare -a pkgsBuildTarget=() -declare -a pkgsHostHost=() -declare -a pkgsHostTarget=() -declare -a pkgsTargetTarget=() -declare -a postFixupHooks=('noBrokenSymlinksInAllOutputs' '_makeSymlinksRelativeInAllOutputs' '_multioutPropagateDev' ) -declare -a postUnpackHooks=('_updateSourceDateEpochFromSourceRoot' ) -declare -a preConfigureHooks=('_multioutConfig' ) -preConfigurePhases=' updateAutotoolsGnuConfigScriptsPhase' -declare -a preFixupHooks=('_moveToShare' '_multioutDocs' '_multioutDevs' ) -preferLocalBuild='1' -export preferLocalBuild -prefix='/home/jawz/Development/Projects/personal/webref/outputs/out' -declare -a propagatedBuildDepFiles=('propagated-build-build-deps' 'propagated-native-build-inputs' 'propagated-build-target-deps' ) -propagatedBuildInputs='' -export propagatedBuildInputs -declare -a propagatedHostDepFiles=('propagated-host-host-deps' 'propagated-build-inputs' ) -propagatedNativeBuildInputs='' -export propagatedNativeBuildInputs -declare -a propagatedTargetDepFiles=('propagated-target-target-deps' ) 
-shell='/nix/store/cfqbabpc7xwg8akbcchqbq3cai6qq2vs-bash-5.2p37/bin/bash' -export shell -shellHook='' -export shellHook -stdenv='/nix/store/p2mnji2cdxgf6h27hlqzqf7g8f9bqfsi-stdenv-linux' -export stdenv -strictDeps='' -export strictDeps -system='x86_64-linux' -export system -declare -a unpackCmdHooks=('_defaultUnpack' ) -_activatePkgs () -{ - - local hostOffset targetOffset; - local pkg; - for hostOffset in "${allPlatOffsets[@]}"; - do - local pkgsVar="${pkgAccumVarVars[hostOffset + 1]}"; - for targetOffset in "${allPlatOffsets[@]}"; - do - (( hostOffset <= targetOffset )) || continue; - local pkgsRef="${pkgsVar}[$targetOffset - $hostOffset]"; - local pkgsSlice="${!pkgsRef}[@]"; - for pkg in ${!pkgsSlice+"${!pkgsSlice}"}; - do - activatePackage "$pkg" "$hostOffset" "$targetOffset"; - done; - done; - done -} -_addRpathPrefix () -{ - - if [ "${NIX_NO_SELF_RPATH:-0}" != 1 ]; then - export NIX_LDFLAGS="-rpath $1/lib ${NIX_LDFLAGS-}"; - fi -} -_addToEnv () -{ - - local depHostOffset depTargetOffset; - local pkg; - for depHostOffset in "${allPlatOffsets[@]}"; - do - local hookVar="${pkgHookVarVars[depHostOffset + 1]}"; - local pkgsVar="${pkgAccumVarVars[depHostOffset + 1]}"; - for depTargetOffset in "${allPlatOffsets[@]}"; - do - (( depHostOffset <= depTargetOffset )) || continue; - local hookRef="${hookVar}[$depTargetOffset - $depHostOffset]"; - if [[ -z "${strictDeps-}" ]]; then - local visitedPkgs=""; - for pkg in "${pkgsBuildBuild[@]}" "${pkgsBuildHost[@]}" "${pkgsBuildTarget[@]}" "${pkgsHostHost[@]}" "${pkgsHostTarget[@]}" "${pkgsTargetTarget[@]}"; - do - if [[ "$visitedPkgs" = *"$pkg"* ]]; then - continue; - fi; - runHook "${!hookRef}" "$pkg"; - visitedPkgs+=" $pkg"; - done; - else - local pkgsRef="${pkgsVar}[$depTargetOffset - $depHostOffset]"; - local pkgsSlice="${!pkgsRef}[@]"; - for pkg in ${!pkgsSlice+"${!pkgsSlice}"}; - do - runHook "${!hookRef}" "$pkg"; - done; - fi; - done; - done -} -_allFlags () -{ - - export system pname name version; - while IFS='' read 
-r varName; do - nixTalkativeLog "@${varName}@ -> ${!varName}"; - args+=("--subst-var" "$varName"); - done < <(awk 'BEGIN { for (v in ENVIRON) if (v ~ /^[a-z][a-zA-Z0-9_]*$/) print v }') -} -_assignFirst () -{ - - local varName="$1"; - local _var; - local REMOVE=REMOVE; - shift; - for _var in "$@"; - do - if [ -n "${!_var-}" ]; then - eval "${varName}"="${_var}"; - return; - fi; - done; - echo; - echo "error: _assignFirst: could not find a non-empty variable whose name to assign to ${varName}."; - echo " The following variables were all unset or empty:"; - echo " $*"; - if [ -z "${out:-}" ]; then - echo ' If you do not want an "out" output in your derivation, make sure to define'; - echo ' the other specific required outputs. This can be achieved by picking one'; - echo " of the above as an output."; - echo ' You do not have to remove "out" if you want to have a different default'; - echo ' output, because the first output is taken as a default.'; - echo; - fi; - return 1 -} -_callImplicitHook () -{ - - local def="$1"; - local hookName="$2"; - if declare -F "$hookName" > /dev/null; then - nixTalkativeLog "calling implicit '$hookName' function hook"; - "$hookName"; - else - if type -p "$hookName" > /dev/null; then - nixTalkativeLog "sourcing implicit '$hookName' script hook"; - source "$hookName"; - else - if [ -n "${!hookName:-}" ]; then - nixTalkativeLog "evaling implicit '$hookName' string hook"; - eval "${!hookName}"; - else - return "$def"; - fi; - fi; - fi -} -_defaultUnpack () -{ - - local fn="$1"; - local destination; - if [ -d "$fn" ]; then - destination="$(stripHash "$fn")"; - if [ -e "$destination" ]; then - echo "Cannot copy $fn to $destination: destination already exists!"; - echo "Did you specify two \"srcs\" with the same \"name\"?"; - return 1; - fi; - cp -r --preserve=mode,timestamps --reflink=auto -- "$fn" "$destination"; - else - case "$fn" in - *.tar.xz | *.tar.lzma | *.txz) - ( XZ_OPT="--threads=$NIX_BUILD_CORES" xz -d < "$fn"; - true ) | tar xf 
- --mode=+w --warning=no-timestamp - ;; - *.tar | *.tar.* | *.tgz | *.tbz2 | *.tbz) - tar xf "$fn" --mode=+w --warning=no-timestamp - ;; - *) - return 1 - ;; - esac; - fi -} -_doStrip () -{ - - local -ra flags=(dontStripHost dontStripTarget); - local -ra debugDirs=(stripDebugList stripDebugListTarget); - local -ra allDirs=(stripAllList stripAllListTarget); - local -ra stripCmds=(STRIP STRIP_FOR_TARGET); - local -ra ranlibCmds=(RANLIB RANLIB_FOR_TARGET); - stripDebugList=${stripDebugList[*]:-lib lib32 lib64 libexec bin sbin Applications Library/Frameworks}; - stripDebugListTarget=${stripDebugListTarget[*]:-}; - stripAllList=${stripAllList[*]:-}; - stripAllListTarget=${stripAllListTarget[*]:-}; - local i; - for i in ${!stripCmds[@]}; - do - local -n flag="${flags[$i]}"; - local -n debugDirList="${debugDirs[$i]}"; - local -n allDirList="${allDirs[$i]}"; - local -n stripCmd="${stripCmds[$i]}"; - local -n ranlibCmd="${ranlibCmds[$i]}"; - if [[ -n "${dontStrip-}" || -n "${flag-}" ]] || ! type -f "${stripCmd-}" 2> /dev/null 1>&2; then - continue; - fi; - stripDirs "$stripCmd" "$ranlibCmd" "$debugDirList" "${stripDebugFlags[*]:--S -p}"; - stripDirs "$stripCmd" "$ranlibCmd" "$allDirList" "${stripAllFlags[*]:--s -p}"; - done -} -_eval () -{ - - if declare -F "$1" > /dev/null 2>&1; then - "$@"; - else - eval "$1"; - fi -} -_logHook () -{ - - if [[ -z ${NIX_LOG_FD-} ]]; then - return; - fi; - local hookKind="$1"; - local hookExpr="$2"; - shift 2; - if declare -F "$hookExpr" > /dev/null 2>&1; then - nixTalkativeLog "calling '$hookKind' function hook '$hookExpr'" "$@"; - else - if type -p "$hookExpr" > /dev/null; then - nixTalkativeLog "sourcing '$hookKind' script hook '$hookExpr'"; - else - if [[ "$hookExpr" != "_callImplicitHook"* ]]; then - local exprToOutput; - if [[ ${NIX_DEBUG:-0} -ge 5 ]]; then - exprToOutput="$hookExpr"; - else - local hookExprLine; - while IFS= read -r hookExprLine; do - hookExprLine="${hookExprLine#"${hookExprLine%%[![:space:]]*}"}"; - if [[ -n 
"$hookExprLine" ]]; then - exprToOutput+="$hookExprLine\\n "; - fi; - done <<< "$hookExpr"; - exprToOutput="${exprToOutput%%\\n }"; - fi; - nixTalkativeLog "evaling '$hookKind' string hook '$exprToOutput'"; - fi; - fi; - fi -} -_makeSymlinksRelative () -{ - - local symlinkTarget; - if [ "${dontRewriteSymlinks-}" ] || [ ! -e "$prefix" ]; then - return; - fi; - while IFS= read -r -d '' f; do - symlinkTarget=$(readlink "$f"); - if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then - continue; - fi; - if [ ! -e "$symlinkTarget" ]; then - echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"; - fi; - echo "rewriting symlink $f to be relative to $prefix"; - ln -snrf "$symlinkTarget" "$f"; - done < <(find $prefix -type l -print0) -} -_makeSymlinksRelativeInAllOutputs () -{ - - local output; - for output in $(getAllOutputNames); - do - prefix="${!output}" _makeSymlinksRelative; - done -} -_moveLib64 () -{ - - if [ "${dontMoveLib64-}" = 1 ]; then - return; - fi; - if [ ! -e "$prefix/lib64" -o -L "$prefix/lib64" ]; then - return; - fi; - echo "moving $prefix/lib64/* to $prefix/lib"; - mkdir -p $prefix/lib; - shopt -s dotglob; - for i in $prefix/lib64/*; - do - mv --no-clobber "$i" $prefix/lib; - done; - shopt -u dotglob; - rmdir $prefix/lib64; - ln -s lib $prefix/lib64 -} -_moveSbin () -{ - - if [ "${dontMoveSbin-}" = 1 ]; then - return; - fi; - if [ ! -e "$prefix/sbin" -o -L "$prefix/sbin" ]; then - return; - fi; - echo "moving $prefix/sbin/* to $prefix/bin"; - mkdir -p $prefix/bin; - shopt -s dotglob; - for i in $prefix/sbin/*; - do - mv "$i" $prefix/bin; - done; - shopt -u dotglob; - rmdir $prefix/sbin; - ln -s bin $prefix/sbin -} -_moveSystemdUserUnits () -{ - - if [ "${dontMoveSystemdUserUnits:-0}" = 1 ]; then - return; - fi; - if [ ! 
-e "${prefix:?}/lib/systemd/user" ]; then - return; - fi; - local source="$prefix/lib/systemd/user"; - local target="$prefix/share/systemd/user"; - echo "moving $source/* to $target"; - mkdir -p "$target"; - ( shopt -s dotglob; - for i in "$source"/*; - do - mv "$i" "$target"; - done ); - rmdir "$source"; - ln -s "$target" "$source" -} -_moveToShare () -{ - - if [ -n "$__structuredAttrs" ]; then - if [ -z "${forceShare-}" ]; then - forceShare=(man doc info); - fi; - else - forceShare=(${forceShare:-man doc info}); - fi; - if [[ -z "$out" ]]; then - return; - fi; - for d in "${forceShare[@]}"; - do - if [ -d "$out/$d" ]; then - if [ -d "$out/share/$d" ]; then - echo "both $d/ and share/$d/ exist!"; - else - echo "moving $out/$d to $out/share/$d"; - mkdir -p $out/share; - mv $out/$d $out/share/; - fi; - fi; - done -} -_multioutConfig () -{ - - if [ "$(getAllOutputNames)" = "out" ] || [ -z "${setOutputFlags-1}" ]; then - return; - fi; - if [ -z "${shareDocName:-}" ]; then - local confScript="${configureScript:-}"; - if [ -z "$confScript" ] && [ -x ./configure ]; then - confScript=./configure; - fi; - if [ -f "$confScript" ]; then - local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"; - fi; - if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then - shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"; - fi; - fi; - prependToVar configureFlags --bindir="${!outputBin}"/bin --sbindir="${!outputBin}"/sbin --includedir="${!outputInclude}"/include --mandir="${!outputMan}"/share/man --infodir="${!outputInfo}"/share/info --docdir="${!outputDoc}"/share/doc/"${shareDocName}" --libdir="${!outputLib}"/lib --libexecdir="${!outputLib}"/libexec --localedir="${!outputLib}"/share/locale; - prependToVar installFlags pkgconfigdir="${!outputDev}"/lib/pkgconfig m4datadir="${!outputDev}"/share/aclocal aclocaldir="${!outputDev}"/share/aclocal -} -_multioutDevs () -{ - - if [ "$(getAllOutputNames)" = "out" ] || [ -z 
"${moveToDev-1}" ]; then - return; - fi; - moveToOutput include "${!outputInclude}"; - moveToOutput lib/pkgconfig "${!outputDev}"; - moveToOutput share/pkgconfig "${!outputDev}"; - moveToOutput lib/cmake "${!outputDev}"; - moveToOutput share/aclocal "${!outputDev}"; - for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; - do - echo "Patching '$f' includedir to output ${!outputInclude}"; - sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"; - done -} -_multioutDocs () -{ - - local REMOVE=REMOVE; - moveToOutput share/info "${!outputInfo}"; - moveToOutput share/doc "${!outputDoc}"; - moveToOutput share/gtk-doc "${!outputDevdoc}"; - moveToOutput share/devhelp/books "${!outputDevdoc}"; - moveToOutput share/man "${!outputMan}"; - moveToOutput share/man/man3 "${!outputDevman}" -} -_multioutPropagateDev () -{ - - if [ "$(getAllOutputNames)" = "out" ]; then - return; - fi; - local outputFirst; - for outputFirst in $(getAllOutputNames); - do - break; - done; - local propagaterOutput="$outputDev"; - if [ -z "$propagaterOutput" ]; then - propagaterOutput="$outputFirst"; - fi; - if [ -z "${propagatedBuildOutputs+1}" ]; then - local po_dirty="$outputBin $outputInclude $outputLib"; - set +o pipefail; - propagatedBuildOutputs=`echo "$po_dirty" | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" | sort -u | tr '\n' ' ' `; - set -o pipefail; - fi; - if [ -z "$propagatedBuildOutputs" ]; then - return; - fi; - mkdir -p "${!propagaterOutput}"/nix-support; - for output in $propagatedBuildOutputs; - do - echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs; - done -} -_nixLogWithLevel () -{ - - [[ -z ${NIX_LOG_FD-} || ${NIX_DEBUG:-0} -lt ${1:?} ]] && return 0; - local logLevel; - case "${1:?}" in - 0) - logLevel=ERROR - ;; - 1) - logLevel=WARN - ;; - 2) - logLevel=NOTICE - ;; - 3) - logLevel=INFO - ;; - 4) - logLevel=TALKATIVE - ;; - 5) - logLevel=CHATTY - ;; - 6) - logLevel=DEBUG - ;; - 7) - logLevel=VOMIT - ;; - *) - echo "_nixLogWithLevel: 
called with invalid log level: ${1:?}" >&"$NIX_LOG_FD"; - return 1 - ;; - esac; - local callerName="${FUNCNAME[2]}"; - if [[ $callerName == "_callImplicitHook" ]]; then - callerName="${hookName:?}"; - fi; - printf "%s: %s: %s\n" "$logLevel" "$callerName" "${2:?}" >&"$NIX_LOG_FD" -} -_overrideFirst () -{ - - if [ -z "${!1-}" ]; then - _assignFirst "$@"; - fi -} -_pruneLibtoolFiles () -{ - - if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then - return; - fi; - find "$prefix" -type f -name '*.la' -exec grep -q '^# Generated by .*libtool' {} \; -exec grep -q "^old_library=''" {} \; -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \; -} -_updateSourceDateEpochFromSourceRoot () -{ - - if [ -n "$sourceRoot" ]; then - updateSourceDateEpoch "$sourceRoot"; - fi -} -activatePackage () -{ - - local pkg="$1"; - local -r hostOffset="$2"; - local -r targetOffset="$3"; - (( hostOffset <= targetOffset )) || exit 1; - if [ -f "$pkg" ]; then - nixTalkativeLog "sourcing setup hook '$pkg'"; - source "$pkg"; - fi; - if [[ -z "${strictDeps-}" || "$hostOffset" -le -1 ]]; then - addToSearchPath _PATH "$pkg/bin"; - fi; - if (( hostOffset <= -1 )); then - addToSearchPath _XDG_DATA_DIRS "$pkg/share"; - fi; - if [[ "$hostOffset" -eq 0 && -d "$pkg/bin" ]]; then - addToSearchPath _HOST_PATH "$pkg/bin"; - fi; - if [[ -f "$pkg/nix-support/setup-hook" ]]; then - nixTalkativeLog "sourcing setup hook '$pkg/nix-support/setup-hook'"; - source "$pkg/nix-support/setup-hook"; - fi -} -addEnvHooks () -{ - - local depHostOffset="$1"; - shift; - local pkgHookVarsSlice="${pkgHookVarVars[$depHostOffset + 1]}[@]"; - local pkgHookVar; - for pkgHookVar in "${!pkgHookVarsSlice}"; - do - eval "${pkgHookVar}s"'+=("$@")'; - done -} -addToSearchPath () -{ - - addToSearchPathWithCustomDelimiter ":" "$@" -} -addToSearchPathWithCustomDelimiter () -{ - - local delimiter="$1"; - local varName="$2"; - local dir="$3"; - if [[ -d "$dir" && "${!varName:+${delimiter}${!varName}${delimiter}}" 
!= *"${delimiter}${dir}${delimiter}"* ]]; then - export "${varName}=${!varName:+${!varName}${delimiter}}${dir}"; - fi -} -appendToVar () -{ - - local -n nameref="$1"; - local useArray type; - if [ -n "$__structuredAttrs" ]; then - useArray=true; - else - useArray=false; - fi; - if type=$(declare -p "$1" 2> /dev/null); then - case "${type#* }" in - -A*) - echo "appendToVar(): ERROR: trying to use appendToVar on an associative array, use variable+=([\"X\"]=\"Y\") instead." 1>&2; - return 1 - ;; - -a*) - useArray=true - ;; - *) - useArray=false - ;; - esac; - fi; - shift; - if $useArray; then - nameref=(${nameref+"${nameref[@]}"} "$@"); - else - nameref="${nameref-} $*"; - fi -} -auditTmpdir () -{ - - local dir="$1"; - [ -e "$dir" ] || return 0; - echo "checking for references to $TMPDIR/ in $dir..."; - local i; - find "$dir" -type f -print0 | while IFS= read -r -d '' i; do - if [[ "$i" =~ .build-id ]]; then - continue; - fi; - if isELF "$i"; then - if { - printf :; - patchelf --print-rpath "$i" - } | grep -q -F ":$TMPDIR/"; then - echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/"; - exit 1; - fi; - fi; - if isScript "$i"; then - if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then - if grep -q -F "$TMPDIR/" "$i"; then - echo "wrapper script $i contains a forbidden reference to $TMPDIR/"; - exit 1; - fi; - fi; - fi; - done -} -bintoolsWrapper_addLDVars () -{ - - local role_post; - getHostRoleEnvHook; - if [[ -d "$1/lib64" && ! -L "$1/lib64" ]]; then - export NIX_LDFLAGS${role_post}+=" -L$1/lib64"; - fi; - if [[ -d "$1/lib" ]]; then - local -a glob=($1/lib/lib*); - if [ "${#glob[*]}" -gt 0 ]; then - export NIX_LDFLAGS${role_post}+=" -L$1/lib"; - fi; - fi -} -buildPhase () -{ - - runHook preBuild; - if [[ -z "${makeFlags-}" && -z "${makefile:-}" && ! 
( -e Makefile || -e makefile || -e GNUmakefile ) ]]; then - echo "no Makefile or custom buildPhase, doing nothing"; - else - foundMakefile=1; - local flagsArray=(${enableParallelBuilding:+-j${NIX_BUILD_CORES}} SHELL="$SHELL"); - concatTo flagsArray makeFlags makeFlagsArray buildFlags buildFlagsArray; - echoCmd 'build flags' "${flagsArray[@]}"; - make ${makefile:+-f $makefile} "${flagsArray[@]}"; - unset flagsArray; - fi; - runHook postBuild -} -ccWrapper_addCVars () -{ - - local role_post; - getHostRoleEnvHook; - if [ -d "$1/include" ]; then - export NIX_CFLAGS_COMPILE${role_post}+=" -isystem $1/include"; - fi; - if [ -d "$1/Library/Frameworks" ]; then - export NIX_CFLAGS_COMPILE${role_post}+=" -iframework $1/Library/Frameworks"; - fi -} -checkPhase () -{ - - runHook preCheck; - if [[ -z "${foundMakefile:-}" ]]; then - echo "no Makefile or custom checkPhase, doing nothing"; - runHook postCheck; - return; - fi; - if [[ -z "${checkTarget:-}" ]]; then - if make -n ${makefile:+-f $makefile} check > /dev/null 2>&1; then - checkTarget="check"; - else - if make -n ${makefile:+-f $makefile} test > /dev/null 2>&1; then - checkTarget="test"; - fi; - fi; - fi; - if [[ -z "${checkTarget:-}" ]]; then - echo "no check/test target in ${makefile:-Makefile}, doing nothing"; - else - local flagsArray=(${enableParallelChecking:+-j${NIX_BUILD_CORES}} SHELL="$SHELL"); - concatTo flagsArray makeFlags makeFlagsArray checkFlags=VERBOSE=y checkFlagsArray checkTarget; - echoCmd 'check flags' "${flagsArray[@]}"; - make ${makefile:+-f $makefile} "${flagsArray[@]}"; - unset flagsArray; - fi; - runHook postCheck -} -compressManPages () -{ - - local dir="$1"; - if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]; then - return; - fi; - echo "gzipping man pages under $dir/share/man/"; - find "$dir"/share/man/ -type f -a '!' 
-regex '.*\.\(bz2\|gz\|xz\)$' -print0 | while IFS= read -r -d '' f; do - if gzip -c -n "$f" > "$f".gz; then - rm "$f"; - else - rm "$f".gz; - fi; - done; - find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\|xz\)$' -print0 | sort -z | while IFS= read -r -d '' f; do - local target; - target="$(readlink -f "$f")"; - if [ -f "$target".gz ]; then - ln -sf "$target".gz "$f".gz && rm "$f"; - fi; - done -} -concatStringsSep () -{ - - local sep="$1"; - local name="$2"; - local type oldifs; - if type=$(declare -p "$name" 2> /dev/null); then - local -n nameref="$name"; - case "${type#* }" in - -A*) - echo "concatStringsSep(): ERROR: trying to use concatStringsSep on an associative array." 1>&2; - return 1 - ;; - -a*) - local IFS="$(printf '\036')" - ;; - *) - local IFS=" " - ;; - esac; - local ifs_separated="${nameref[*]}"; - echo -n "${ifs_separated//"$IFS"/"$sep"}"; - fi -} -concatTo () -{ - - local -; - set -o noglob; - local -n targetref="$1"; - shift; - local arg default name type; - for arg in "$@"; - do - IFS="=" read -r name default <<< "$arg"; - local -n nameref="$name"; - if [[ -z "${nameref[*]}" && -n "$default" ]]; then - targetref+=("$default"); - else - if type=$(declare -p "$name" 2> /dev/null); then - case "${type#* }" in - -A*) - echo "concatTo(): ERROR: trying to use concatTo on an associative array." 1>&2; - return 1 - ;; - -a*) - targetref+=("${nameref[@]}") - ;; - *) - if [[ "$name" = *"Array" ]]; then - nixErrorLog "concatTo(): $name is not declared as array, treating as a singleton. This will become an error in future"; - targetref+=(${nameref+"${nameref[@]}"}); - else - targetref+=(${nameref-}); - fi - ;; - esac; - fi; - fi; - done -} -configurePhase () -{ - - runHook preConfigure; - : "${configureScript=}"; - if [[ -z "$configureScript" && -x ./configure ]]; then - configureScript=./configure; - fi; - if [ -z "${dontFixLibtool:-}" ]; then - export lt_cv_deplibs_check_method="${lt_cv_deplibs_check_method-pass_all}"; - local i; - find . 
-iname "ltmain.sh" -print0 | while IFS='' read -r -d '' i; do - echo "fixing libtool script $i"; - fixLibtool "$i"; - done; - CONFIGURE_MTIME_REFERENCE=$(mktemp configure.mtime.reference.XXXXXX); - find . -executable -type f -name configure -exec grep -l 'GNU Libtool is free software; you can redistribute it and/or modify' {} \; -exec touch -r {} "$CONFIGURE_MTIME_REFERENCE" \; -exec sed -i s_/usr/bin/file_file_g {} \; -exec touch -r "$CONFIGURE_MTIME_REFERENCE" {} \;; - rm -f "$CONFIGURE_MTIME_REFERENCE"; - fi; - if [[ -z "${dontAddPrefix:-}" && -n "$prefix" ]]; then - prependToVar configureFlags "${prefixKey:---prefix=}$prefix"; - fi; - if [[ -f "$configureScript" ]]; then - if [ -z "${dontAddDisableDepTrack:-}" ]; then - if grep -q dependency-tracking "$configureScript"; then - prependToVar configureFlags --disable-dependency-tracking; - fi; - fi; - if [ -z "${dontDisableStatic:-}" ]; then - if grep -q enable-static "$configureScript"; then - prependToVar configureFlags --disable-static; - fi; - fi; - if [ -z "${dontPatchShebangsInConfigure:-}" ]; then - patchShebangs --build "$configureScript"; - fi; - fi; - if [ -n "$configureScript" ]; then - local -a flagsArray; - concatTo flagsArray configureFlags configureFlagsArray; - echoCmd 'configure flags' "${flagsArray[@]}"; - $configureScript "${flagsArray[@]}"; - unset flagsArray; - else - echo "no configure script, doing nothing"; - fi; - runHook postConfigure -} -consumeEntire () -{ - - if IFS='' read -r -d '' "$1"; then - echo "consumeEntire(): ERROR: Input null bytes, won't process" 1>&2; - return 1; - fi -} -distPhase () -{ - - runHook preDist; - local flagsArray=(); - concatTo flagsArray distFlags distFlagsArray distTarget=dist; - echo 'dist flags: %q' "${flagsArray[@]}"; - make ${makefile:+-f $makefile} "${flagsArray[@]}"; - if [ "${dontCopyDist:-0}" != 1 ]; then - mkdir -p "$out/tarballs"; - cp -pvd ${tarballs[*]:-*.tar.gz} "$out/tarballs"; - fi; - runHook postDist -} -dumpVars () -{ - - if [ 
"${noDumpEnvVars:-0}" != 1 ]; then - { - install -m 0600 /dev/null "$NIX_BUILD_TOP/env-vars" && export 2> /dev/null >| "$NIX_BUILD_TOP/env-vars" - } || true; - fi -} -echoCmd () -{ - - printf "%s:" "$1"; - shift; - printf ' %q' "$@"; - echo -} -exitHandler () -{ - - exitCode="$?"; - set +e; - if [ -n "${showBuildStats:-}" ]; then - read -r -d '' -a buildTimes < <(times); - echo "build times:"; - echo "user time for the shell ${buildTimes[0]}"; - echo "system time for the shell ${buildTimes[1]}"; - echo "user time for all child processes ${buildTimes[2]}"; - echo "system time for all child processes ${buildTimes[3]}"; - fi; - if (( "$exitCode" != 0 )); then - runHook failureHook; - if [ -n "${succeedOnFailure:-}" ]; then - echo "build failed with exit code $exitCode (ignored)"; - mkdir -p "$out/nix-support"; - printf "%s" "$exitCode" > "$out/nix-support/failed"; - exit 0; - fi; - else - runHook exitHook; - fi; - return "$exitCode" -} -findInputs () -{ - - local -r pkg="$1"; - local -r hostOffset="$2"; - local -r targetOffset="$3"; - (( hostOffset <= targetOffset )) || exit 1; - local varVar="${pkgAccumVarVars[hostOffset + 1]}"; - local varRef="$varVar[$((targetOffset - hostOffset))]"; - local var="${!varRef}"; - unset -v varVar varRef; - local varSlice="$var[*]"; - case " ${!varSlice-} " in - *" $pkg "*) - return 0 - ;; - esac; - unset -v varSlice; - eval "$var"'+=("$pkg")'; - if ! 
[ -e "$pkg" ]; then - echo "build input $pkg does not exist" 1>&2; - exit 1; - fi; - function mapOffset () - { - local -r inputOffset="$1"; - local -n outputOffset="$2"; - if (( inputOffset <= 0 )); then - outputOffset=$((inputOffset + hostOffset)); - else - outputOffset=$((inputOffset - 1 + targetOffset)); - fi - }; - local relHostOffset; - for relHostOffset in "${allPlatOffsets[@]}"; - do - local files="${propagatedDepFilesVars[relHostOffset + 1]}"; - local hostOffsetNext; - mapOffset "$relHostOffset" hostOffsetNext; - (( -1 <= hostOffsetNext && hostOffsetNext <= 1 )) || continue; - local relTargetOffset; - for relTargetOffset in "${allPlatOffsets[@]}"; - do - (( "$relHostOffset" <= "$relTargetOffset" )) || continue; - local fileRef="${files}[$relTargetOffset - $relHostOffset]"; - local file="${!fileRef}"; - unset -v fileRef; - local targetOffsetNext; - mapOffset "$relTargetOffset" targetOffsetNext; - (( -1 <= hostOffsetNext && hostOffsetNext <= 1 )) || continue; - [[ -f "$pkg/nix-support/$file" ]] || continue; - local pkgNext; - read -r -d '' pkgNext < "$pkg/nix-support/$file" || true; - for pkgNext in $pkgNext; - do - findInputs "$pkgNext" "$hostOffsetNext" "$targetOffsetNext"; - done; - done; - done -} -fixLibtool () -{ - - local search_path; - for flag in $NIX_LDFLAGS; - do - case $flag in - -L*) - search_path+=" ${flag#-L}" - ;; - esac; - done; - sed -i "$1" -e "s^eval \(sys_lib_search_path=\).*^\1'${search_path:-}'^" -e 's^eval sys_lib_.+search_path=.*^^' -} -fixupPhase () -{ - - local output; - for output in $(getAllOutputNames); - do - if [ -e "${!output}" ]; then - chmod -R u+w,u-s,g-s "${!output}"; - fi; - done; - runHook preFixup; - local output; - for output in $(getAllOutputNames); - do - prefix="${!output}" runHook fixupOutput; - done; - recordPropagatedDependencies; - if [ -n "${setupHook:-}" ]; then - mkdir -p "${!outputDev}/nix-support"; - substituteAll "$setupHook" "${!outputDev}/nix-support/setup-hook"; - fi; - if [ -n "${setupHooks:-}" ]; then 
- mkdir -p "${!outputDev}/nix-support"; - local hook; - for hook in ${setupHooks[@]}; - do - local content; - consumeEntire content < "$hook"; - substituteAllStream content "file '$hook'" >> "${!outputDev}/nix-support/setup-hook"; - unset -v content; - done; - unset -v hook; - fi; - if [ -n "${propagatedUserEnvPkgs[*]:-}" ]; then - mkdir -p "${!outputBin}/nix-support"; - printWords "${propagatedUserEnvPkgs[@]}" > "${!outputBin}/nix-support/propagated-user-env-packages"; - fi; - runHook postFixup -} -genericBuild () -{ - - export GZIP_NO_TIMESTAMPS=1; - if [ -f "${buildCommandPath:-}" ]; then - source "$buildCommandPath"; - return; - fi; - if [ -n "${buildCommand:-}" ]; then - eval "$buildCommand"; - return; - fi; - if [ -z "${phases[*]:-}" ]; then - phases="${prePhases[*]:-} unpackPhase patchPhase ${preConfigurePhases[*]:-} configurePhase ${preBuildPhases[*]:-} buildPhase checkPhase ${preInstallPhases[*]:-} installPhase ${preFixupPhases[*]:-} fixupPhase installCheckPhase ${preDistPhases[*]:-} distPhase ${postPhases[*]:-}"; - fi; - for curPhase in ${phases[*]}; - do - runPhase "$curPhase"; - done -} -getAllOutputNames () -{ - - if [ -n "$__structuredAttrs" ]; then - echo "${!outputs[*]}"; - else - echo "$outputs"; - fi -} -getHostRole () -{ - - getRole "$hostOffset" -} -getHostRoleEnvHook () -{ - - getRole "$depHostOffset" -} -getRole () -{ - - case $1 in - -1) - role_post='_FOR_BUILD' - ;; - 0) - role_post='' - ;; - 1) - role_post='_FOR_TARGET' - ;; - *) - echo "binutils-wrapper-2.44: used as improper sort of dependency" 1>&2; - return 1 - ;; - esac -} -getTargetRole () -{ - - getRole "$targetOffset" -} -getTargetRoleEnvHook () -{ - - getRole "$depTargetOffset" -} -getTargetRoleWrapper () -{ - - case $targetOffset in - -1) - export NIX_BINTOOLS_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu=1 - ;; - 0) - export NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu=1 - ;; - 1) - export NIX_BINTOOLS_WRAPPER_TARGET_TARGET_x86_64_unknown_linux_gnu=1 - ;; - *) - echo 
"binutils-wrapper-2.44: used as improper sort of dependency" 1>&2; - return 1 - ;; - esac -} -installCheckPhase () -{ - - runHook preInstallCheck; - if [[ -z "${foundMakefile:-}" ]]; then - echo "no Makefile or custom installCheckPhase, doing nothing"; - else - if [[ -z "${installCheckTarget:-}" ]] && ! make -n ${makefile:+-f $makefile} "${installCheckTarget:-installcheck}" > /dev/null 2>&1; then - echo "no installcheck target in ${makefile:-Makefile}, doing nothing"; - else - local flagsArray=(${enableParallelChecking:+-j${NIX_BUILD_CORES}} SHELL="$SHELL"); - concatTo flagsArray makeFlags makeFlagsArray installCheckFlags installCheckFlagsArray installCheckTarget=installcheck; - echoCmd 'installcheck flags' "${flagsArray[@]}"; - make ${makefile:+-f $makefile} "${flagsArray[@]}"; - unset flagsArray; - fi; - fi; - runHook postInstallCheck -} -installPhase () -{ - - runHook preInstall; - if [[ -z "${makeFlags-}" && -z "${makefile:-}" && ! ( -e Makefile || -e makefile || -e GNUmakefile ) ]]; then - echo "no Makefile or custom installPhase, doing nothing"; - runHook postInstall; - return; - else - foundMakefile=1; - fi; - if [ -n "$prefix" ]; then - mkdir -p "$prefix"; - fi; - local flagsArray=(${enableParallelInstalling:+-j${NIX_BUILD_CORES}} SHELL="$SHELL"); - concatTo flagsArray makeFlags makeFlagsArray installFlags installFlagsArray installTargets=install; - echoCmd 'install flags' "${flagsArray[@]}"; - make ${makefile:+-f $makefile} "${flagsArray[@]}"; - unset flagsArray; - runHook postInstall -} -isELF () -{ - - local fn="$1"; - local fd; - local magic; - exec {fd}< "$fn"; - read -r -n 4 -u "$fd" magic; - exec {fd}>&-; - if [ "$magic" = 'ELF' ]; then - return 0; - else - return 1; - fi -} -isMachO () -{ - - local fn="$1"; - local fd; - local magic; - exec {fd}< "$fn"; - read -r -n 4 -u "$fd" magic; - exec {fd}>&-; - if [[ "$magic" = $(echo -ne "\xfe\xed\xfa\xcf") || "$magic" = $(echo -ne "\xcf\xfa\xed\xfe") ]]; then - return 0; - else - if [[ "$magic" = $(echo -ne 
"\xfe\xed\xfa\xce") || "$magic" = $(echo -ne "\xce\xfa\xed\xfe") ]]; then - return 0; - else - if [[ "$magic" = $(echo -ne "\xca\xfe\xba\xbe") || "$magic" = $(echo -ne "\xbe\xba\xfe\xca") ]]; then - return 0; - else - return 1; - fi; - fi; - fi -} -isScript () -{ - - local fn="$1"; - local fd; - local magic; - exec {fd}< "$fn"; - read -r -n 2 -u "$fd" magic; - exec {fd}>&-; - if [[ "$magic" =~ \#! ]]; then - return 0; - else - return 1; - fi -} -mapOffset () -{ - - local -r inputOffset="$1"; - local -n outputOffset="$2"; - if (( inputOffset <= 0 )); then - outputOffset=$((inputOffset + hostOffset)); - else - outputOffset=$((inputOffset - 1 + targetOffset)); - fi -} -moveToOutput () -{ - - local patt="$1"; - local dstOut="$2"; - local output; - for output in $(getAllOutputNames); - do - if [ "${!output}" = "$dstOut" ]; then - continue; - fi; - local srcPath; - for srcPath in "${!output}"/$patt; - do - if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then - continue; - fi; - if [ "$dstOut" = REMOVE ]; then - echo "Removing $srcPath"; - rm -r "$srcPath"; - else - local dstPath="$dstOut${srcPath#${!output}}"; - echo "Moving $srcPath to $dstPath"; - if [ -d "$dstPath" ] && [ -d "$srcPath" ]; then - rmdir "$srcPath" --ignore-fail-on-non-empty; - if [ -d "$srcPath" ]; then - mv -t "$dstPath" "$srcPath"/*; - rmdir "$srcPath"; - fi; - else - mkdir -p "$(readlink -m "$dstPath/..")"; - mv "$srcPath" "$dstPath"; - fi; - fi; - local srcParent="$(readlink -m "$srcPath/..")"; - if [ -n "$(find "$srcParent" -maxdepth 0 -type d -empty 2> /dev/null)" ]; then - echo "Removing empty $srcParent/ and (possibly) its parents"; - rmdir -p --ignore-fail-on-non-empty "$srcParent" 2> /dev/null || true; - fi; - done; - done -} -nixChattyLog () -{ - - _nixLogWithLevel 5 "$*" -} -nixDebugLog () -{ - - _nixLogWithLevel 6 "$*" -} -nixErrorLog () -{ - - _nixLogWithLevel 0 "$*" -} -nixInfoLog () -{ - - _nixLogWithLevel 3 "$*" -} -nixLog () -{ - - [[ -z ${NIX_LOG_FD-} ]] && return 0; - local 
callerName="${FUNCNAME[1]}"; - if [[ $callerName == "_callImplicitHook" ]]; then - callerName="${hookName:?}"; - fi; - printf "%s: %s\n" "$callerName" "$*" >&"$NIX_LOG_FD" -} -nixNoticeLog () -{ - - _nixLogWithLevel 2 "$*" -} -nixTalkativeLog () -{ - - _nixLogWithLevel 4 "$*" -} -nixVomitLog () -{ - - _nixLogWithLevel 7 "$*" -} -nixWarnLog () -{ - - _nixLogWithLevel 1 "$*" -} -noBrokenSymlinks () -{ - - local -r output="${1:?}"; - local path; - local pathParent; - local symlinkTarget; - local -i numDanglingSymlinks=0; - local -i numReflexiveSymlinks=0; - local -i numUnreadableSymlinks=0; - if [[ ! -e $output ]]; then - nixWarnLog "skipping non-existent output $output"; - return 0; - fi; - nixInfoLog "running on $output"; - while IFS= read -r -d '' path; do - pathParent="$(dirname "$path")"; - if ! symlinkTarget="$(readlink "$path")"; then - nixErrorLog "the symlink $path is unreadable"; - numUnreadableSymlinks+=1; - continue; - fi; - if [[ $symlinkTarget == /* ]]; then - nixInfoLog "symlink $path points to absolute target $symlinkTarget"; - else - nixInfoLog "symlink $path points to relative target $symlinkTarget"; - symlinkTarget="$(realpath --no-symlinks --canonicalize-missing "$pathParent/$symlinkTarget")"; - fi; - if [[ $symlinkTarget != "$NIX_STORE"/* ]]; then - nixInfoLog "symlink $path points outside the Nix store; ignoring"; - continue; - fi; - if [[ $path == "$symlinkTarget" ]]; then - nixErrorLog "the symlink $path is reflexive"; - numReflexiveSymlinks+=1; - else - if [[ ! 
-e $symlinkTarget ]]; then - nixErrorLog "the symlink $path points to a missing target: $symlinkTarget"; - numDanglingSymlinks+=1; - else - nixDebugLog "the symlink $path is irreflexive and points to a target which exists"; - fi; - fi; - done < <(find "$output" -type l -print0); - if ((numDanglingSymlinks > 0 || numReflexiveSymlinks > 0 || numUnreadableSymlinks > 0)); then - nixErrorLog "found $numDanglingSymlinks dangling symlinks, $numReflexiveSymlinks reflexive symlinks and $numUnreadableSymlinks unreadable symlinks"; - exit 1; - fi; - return 0 -} -noBrokenSymlinksInAllOutputs () -{ - - if [[ -z ${dontCheckForBrokenSymlinks-} ]]; then - for output in $(getAllOutputNames); - do - noBrokenSymlinks "${!output}"; - done; - fi -} -patchELF () -{ - - local dir="$1"; - [ -e "$dir" ] || return 0; - echo "shrinking RPATHs of ELF executables and libraries in $dir"; - local i; - while IFS= read -r -d '' i; do - if [[ "$i" =~ .build-id ]]; then - continue; - fi; - if ! isELF "$i"; then - continue; - fi; - echo "shrinking $i"; - patchelf --shrink-rpath "$i" || true; - done < <(find "$dir" -type f -print0) -} -patchPhase () -{ - - runHook prePatch; - local -a patchesArray; - concatTo patchesArray patches; - local -a flagsArray; - concatTo flagsArray patchFlags=-p1; - for i in "${patchesArray[@]}"; - do - echo "applying patch $i"; - local uncompress=cat; - case "$i" in - *.gz) - uncompress="gzip -d" - ;; - *.bz2) - uncompress="bzip2 -d" - ;; - *.xz) - uncompress="xz -d" - ;; - *.lzma) - uncompress="lzma -d" - ;; - esac; - $uncompress < "$i" 2>&1 | patch "${flagsArray[@]}"; - done; - runHook postPatch -} -patchShebangs () -{ - - local pathName; - local update=false; - while [[ $# -gt 0 ]]; do - case "$1" in - --host) - pathName=HOST_PATH; - shift - ;; - --build) - pathName=PATH; - shift - ;; - --update) - update=true; - shift - ;; - --) - shift; - break - ;; - -* | --*) - echo "Unknown option $1 supplied to patchShebangs" 1>&2; - return 1 - ;; - *) - break - ;; - esac; - done; 
- echo "patching script interpreter paths in $@"; - local f; - local oldPath; - local newPath; - local arg0; - local args; - local oldInterpreterLine; - local newInterpreterLine; - if [[ $# -eq 0 ]]; then - echo "No arguments supplied to patchShebangs" 1>&2; - return 0; - fi; - local f; - while IFS= read -r -d '' f; do - isScript "$f" || continue; - read -r oldInterpreterLine < "$f" || [ "$oldInterpreterLine" ]; - read -r oldPath arg0 args <<< "${oldInterpreterLine:2}"; - if [[ -z "${pathName:-}" ]]; then - if [[ -n $strictDeps && $f == "$NIX_STORE"* ]]; then - pathName=HOST_PATH; - else - pathName=PATH; - fi; - fi; - if [[ "$oldPath" == *"/bin/env" ]]; then - if [[ $arg0 == "-S" ]]; then - arg0=${args%% *}; - [[ "$args" == *" "* ]] && args=${args#* } || args=; - newPath="$(PATH="${!pathName}" type -P "env" || true)"; - args="-S $(PATH="${!pathName}" type -P "$arg0" || true) $args"; - else - if [[ $arg0 == "-"* || $arg0 == *"="* ]]; then - echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" 1>&2; - exit 1; - else - newPath="$(PATH="${!pathName}" type -P "$arg0" || true)"; - fi; - fi; - else - if [[ -z $oldPath ]]; then - oldPath="/bin/sh"; - fi; - newPath="$(PATH="${!pathName}" type -P "$(basename "$oldPath")" || true)"; - args="$arg0 $args"; - fi; - newInterpreterLine="$newPath $args"; - newInterpreterLine=${newInterpreterLine%${newInterpreterLine##*[![:space:]]}}; - if [[ -n "$oldPath" && ( "$update" == true || "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ) ]]; then - if [[ -n "$newPath" && "$newPath" != "$oldPath" ]]; then - echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""; - escapedInterpreterLine=${newInterpreterLine//\\/\\\\}; - timestamp=$(stat --printf "%y" "$f"); - sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"; - touch --date "$timestamp" "$f"; - fi; - fi; - done < <(find "$@" -type f -perm -0100 -print0) -} 
-patchShebangsAuto () -{ - - if [[ -z "${dontPatchShebangs-}" && -e "$prefix" ]]; then - if [[ "$output" != out && "$output" = "$outputDev" ]]; then - patchShebangs --build "$prefix"; - else - patchShebangs --host "$prefix"; - fi; - fi -} -prependToVar () -{ - - local -n nameref="$1"; - local useArray type; - if [ -n "$__structuredAttrs" ]; then - useArray=true; - else - useArray=false; - fi; - if type=$(declare -p "$1" 2> /dev/null); then - case "${type#* }" in - -A*) - echo "prependToVar(): ERROR: trying to use prependToVar on an associative array." 1>&2; - return 1 - ;; - -a*) - useArray=true - ;; - *) - useArray=false - ;; - esac; - fi; - shift; - if $useArray; then - nameref=("$@" ${nameref+"${nameref[@]}"}); - else - nameref="$* ${nameref-}"; - fi -} -printLines () -{ - - (( "$#" > 0 )) || return 0; - printf '%s\n' "$@" -} -printWords () -{ - - (( "$#" > 0 )) || return 0; - printf '%s ' "$@" -} -recordPropagatedDependencies () -{ - - declare -ra flatVars=(depsBuildBuildPropagated propagatedNativeBuildInputs depsBuildTargetPropagated depsHostHostPropagated propagatedBuildInputs depsTargetTargetPropagated); - declare -ra flatFiles=("${propagatedBuildDepFiles[@]}" "${propagatedHostDepFiles[@]}" "${propagatedTargetDepFiles[@]}"); - local propagatedInputsIndex; - for propagatedInputsIndex in "${!flatVars[@]}"; - do - local propagatedInputsSlice="${flatVars[$propagatedInputsIndex]}[@]"; - local propagatedInputsFile="${flatFiles[$propagatedInputsIndex]}"; - [[ -n "${!propagatedInputsSlice}" ]] || continue; - mkdir -p "${!outputDev}/nix-support"; - printWords ${!propagatedInputsSlice} > "${!outputDev}/nix-support/$propagatedInputsFile"; - done -} -runHook () -{ - - local hookName="$1"; - shift; - local hooksSlice="${hookName%Hook}Hooks[@]"; - local hook; - for hook in "_callImplicitHook 0 $hookName" ${!hooksSlice+"${!hooksSlice}"}; - do - _logHook "$hookName" "$hook" "$@"; - _eval "$hook" "$@"; - done; - return 0 -} -runOneHook () -{ - - local hookName="$1"; - shift; 
- local hooksSlice="${hookName%Hook}Hooks[@]"; - local hook ret=1; - for hook in "_callImplicitHook 1 $hookName" ${!hooksSlice+"${!hooksSlice}"}; - do - _logHook "$hookName" "$hook" "$@"; - if _eval "$hook" "$@"; then - ret=0; - break; - fi; - done; - return "$ret" -} -runPhase () -{ - - local curPhase="$*"; - if [[ "$curPhase" = unpackPhase && -n "${dontUnpack:-}" ]]; then - return; - fi; - if [[ "$curPhase" = patchPhase && -n "${dontPatch:-}" ]]; then - return; - fi; - if [[ "$curPhase" = configurePhase && -n "${dontConfigure:-}" ]]; then - return; - fi; - if [[ "$curPhase" = buildPhase && -n "${dontBuild:-}" ]]; then - return; - fi; - if [[ "$curPhase" = checkPhase && -z "${doCheck:-}" ]]; then - return; - fi; - if [[ "$curPhase" = installPhase && -n "${dontInstall:-}" ]]; then - return; - fi; - if [[ "$curPhase" = fixupPhase && -n "${dontFixup:-}" ]]; then - return; - fi; - if [[ "$curPhase" = installCheckPhase && -z "${doInstallCheck:-}" ]]; then - return; - fi; - if [[ "$curPhase" = distPhase && -z "${doDist:-}" ]]; then - return; - fi; - showPhaseHeader "$curPhase"; - dumpVars; - local startTime endTime; - startTime=$(date +"%s"); - eval "${!curPhase:-$curPhase}"; - endTime=$(date +"%s"); - showPhaseFooter "$curPhase" "$startTime" "$endTime"; - if [ "$curPhase" = unpackPhase ]; then - [ -n "${sourceRoot:-}" ] && chmod +x -- "${sourceRoot}"; - cd -- "${sourceRoot:-.}"; - fi -} -showPhaseFooter () -{ - - local phase="$1"; - local startTime="$2"; - local endTime="$3"; - local delta=$(( endTime - startTime )); - (( delta < 30 )) && return; - local H=$((delta/3600)); - local M=$((delta%3600/60)); - local S=$((delta%60)); - echo -n "$phase completed in "; - (( H > 0 )) && echo -n "$H hours "; - (( M > 0 )) && echo -n "$M minutes "; - echo "$S seconds" -} -showPhaseHeader () -{ - - local phase="$1"; - echo "Running phase: $phase"; - if [[ -z ${NIX_LOG_FD-} ]]; then - return; - fi; - printf "@nix { \"action\": \"setPhase\", \"phase\": \"%s\" }\n" "$phase" 
>&"$NIX_LOG_FD" -} -stripDirs () -{ - - local cmd="$1"; - local ranlibCmd="$2"; - local paths="$3"; - local stripFlags="$4"; - local excludeFlags=(); - local pathsNew=; - [ -z "$cmd" ] && echo "stripDirs: Strip command is empty" 1>&2 && exit 1; - [ -z "$ranlibCmd" ] && echo "stripDirs: Ranlib command is empty" 1>&2 && exit 1; - local pattern; - if [ -n "${stripExclude:-}" ]; then - for pattern in "${stripExclude[@]}"; - do - excludeFlags+=(-a '!' '(' -name "$pattern" -o -wholename "$prefix/$pattern" ')'); - done; - fi; - local p; - for p in ${paths}; - do - if [ -e "$prefix/$p" ]; then - pathsNew="${pathsNew} $prefix/$p"; - fi; - done; - paths=${pathsNew}; - if [ -n "${paths}" ]; then - echo "stripping (with command $cmd and flags $stripFlags) in $paths"; - local striperr; - striperr="$(mktemp --tmpdir="$TMPDIR" 'striperr.XXXXXX')"; - find $paths -type f "${excludeFlags[@]}" -a '!' -path "$prefix/lib/debug/*" -printf '%D-%i,%p\0' | sort -t, -k1,1 -u -z | cut -d, -f2- -z | xargs -r -0 -n1 -P "$NIX_BUILD_CORES" -- $cmd $stripFlags 2> "$striperr" || exit_code=$?; - [[ "$exit_code" = 123 || -z "$exit_code" ]] || ( cat "$striperr" 1>&2 && exit 1 ); - rm "$striperr"; - find $paths -name '*.a' -type f -exec $ranlibCmd '{}' \; 2> /dev/null; - fi -} -stripHash () -{ - - local strippedName casematchOpt=0; - strippedName="$(basename -- "$1")"; - shopt -q nocasematch && casematchOpt=1; - shopt -u nocasematch; - if [[ "$strippedName" =~ ^[a-z0-9]{32}- ]]; then - echo "${strippedName:33}"; - else - echo "$strippedName"; - fi; - if (( casematchOpt )); then - shopt -s nocasematch; - fi -} -substitute () -{ - - local input="$1"; - local output="$2"; - shift 2; - if [ ! 
-f "$input" ]; then - echo "substitute(): ERROR: file '$input' does not exist" 1>&2; - return 1; - fi; - local content; - consumeEntire content < "$input"; - if [ -e "$output" ]; then - chmod +w "$output"; - fi; - substituteStream content "file '$input'" "$@" > "$output" -} -substituteAll () -{ - - local input="$1"; - local output="$2"; - local -a args=(); - _allFlags; - substitute "$input" "$output" "${args[@]}" -} -substituteAllInPlace () -{ - - local fileName="$1"; - shift; - substituteAll "$fileName" "$fileName" "$@" -} -substituteAllStream () -{ - - local -a args=(); - _allFlags; - substituteStream "$1" "$2" "${args[@]}" -} -substituteInPlace () -{ - - local -a fileNames=(); - for arg in "$@"; - do - if [[ "$arg" = "--"* ]]; then - break; - fi; - fileNames+=("$arg"); - shift; - done; - if ! [[ "${#fileNames[@]}" -gt 0 ]]; then - echo "substituteInPlace called without any files to operate on (files must come before options!)" 1>&2; - return 1; - fi; - for file in "${fileNames[@]}"; - do - substitute "$file" "$file" "$@"; - done -} -substituteStream () -{ - - local var=$1; - local description=$2; - shift 2; - while (( "$#" )); do - local replace_mode="$1"; - case "$1" in - --replace) - if ! "$_substituteStream_has_warned_replace_deprecation"; then - echo "substituteStream() in derivation $name: WARNING: '--replace' is deprecated, use --replace-{fail,warn,quiet}. ($description)" 1>&2; - _substituteStream_has_warned_replace_deprecation=true; - fi; - replace_mode='--replace-warn' - ;& - --replace-quiet | --replace-warn | --replace-fail) - pattern="$2"; - replacement="$3"; - shift 3; - if ! 
[[ "${!var}" == *"$pattern"* ]]; then - if [ "$replace_mode" == --replace-warn ]; then - printf "substituteStream() in derivation $name: WARNING: pattern %q doesn't match anything in %s\n" "$pattern" "$description" 1>&2; - else - if [ "$replace_mode" == --replace-fail ]; then - printf "substituteStream() in derivation $name: ERROR: pattern %q doesn't match anything in %s\n" "$pattern" "$description" 1>&2; - return 1; - fi; - fi; - fi; - eval "$var"'=${'"$var"'//"$pattern"/"$replacement"}' - ;; - --subst-var) - local varName="$2"; - shift 2; - if ! [[ "$varName" =~ ^[a-zA-Z_][a-zA-Z0-9_]*$ ]]; then - echo "substituteStream() in derivation $name: ERROR: substitution variables must be valid Bash names, \"$varName\" isn't." 1>&2; - return 1; - fi; - if [ -z ${!varName+x} ]; then - echo "substituteStream() in derivation $name: ERROR: variable \$$varName is unset" 1>&2; - return 1; - fi; - pattern="@$varName@"; - replacement="${!varName}"; - eval "$var"'=${'"$var"'//"$pattern"/"$replacement"}' - ;; - --subst-var-by) - pattern="@$2@"; - replacement="$3"; - eval "$var"'=${'"$var"'//"$pattern"/"$replacement"}'; - shift 3 - ;; - *) - echo "substituteStream() in derivation $name: ERROR: Invalid command line argument: $1" 1>&2; - return 1 - ;; - esac; - done; - printf "%s" "${!var}" -} -unpackFile () -{ - - curSrc="$1"; - echo "unpacking source archive $curSrc"; - if ! 
runOneHook unpackCmd "$curSrc"; then - echo "do not know how to unpack source archive $curSrc"; - exit 1; - fi -} -unpackPhase () -{ - - runHook preUnpack; - if [ -z "${srcs:-}" ]; then - if [ -z "${src:-}" ]; then - echo 'variable $src or $srcs should point to the source'; - exit 1; - fi; - srcs="$src"; - fi; - local -a srcsArray; - concatTo srcsArray srcs; - local dirsBefore=""; - for i in *; - do - if [ -d "$i" ]; then - dirsBefore="$dirsBefore $i "; - fi; - done; - for i in "${srcsArray[@]}"; - do - unpackFile "$i"; - done; - : "${sourceRoot=}"; - if [ -n "${setSourceRoot:-}" ]; then - runOneHook setSourceRoot; - else - if [ -z "$sourceRoot" ]; then - for i in *; - do - if [ -d "$i" ]; then - case $dirsBefore in - *\ $i\ *) - - ;; - *) - if [ -n "$sourceRoot" ]; then - echo "unpacker produced multiple directories"; - exit 1; - fi; - sourceRoot="$i" - ;; - esac; - fi; - done; - fi; - fi; - if [ -z "$sourceRoot" ]; then - echo "unpacker appears to have produced no directories"; - exit 1; - fi; - echo "source root is $sourceRoot"; - if [ "${dontMakeSourcesWritable:-0}" != 1 ]; then - chmod -R u+w -- "$sourceRoot"; - fi; - runHook postUnpack -} -updateAutotoolsGnuConfigScriptsPhase () -{ - - if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then - return; - fi; - for script in config.sub config.guess; - do - for f in $(find . -type f -name "$script"); - do - echo "Updating Autotools / GNU config script to a newer upstream version: $f"; - cp -f "/nix/store/khmqxw6b9q7rgkv6hf3gcqf2igk03z1g-gnu-config-2024-01-01/$script" "$f"; - done; - done -} -updateSourceDateEpoch () -{ - - local path="$1"; - [[ $path == -* ]] && path="./$path"; - local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." 
-printf '%T@ "%p"\0' | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1)); - local time="${res[0]//\.[0-9]*/}"; - local newestFile="${res[1]}"; - if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then - echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"; - export SOURCE_DATE_EPOCH="$time"; - local now="$(date +%s)"; - if [ "$time" -gt $((now - 60)) ]; then - echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"; - fi; - fi -} -PATH="$PATH${nix_saved_PATH:+:$nix_saved_PATH}" -XDG_DATA_DIRS="$XDG_DATA_DIRS${nix_saved_XDG_DATA_DIRS:+:$nix_saved_XDG_DATA_DIRS}" -export NIX_BUILD_TOP="$(mktemp -d -t nix-shell.XXXXXX)" -export TMP="$NIX_BUILD_TOP" -export TMPDIR="$NIX_BUILD_TOP" -export TEMP="$NIX_BUILD_TOP" -export TEMPDIR="$NIX_BUILD_TOP" -eval "${shellHook:-}" diff --git a/.gitignore b/.gitignore index 54d83b2..ec5ea30 100644 --- a/.gitignore +++ b/.gitignore @@ -92,3 +92,4 @@ frontend/dist/ !.specify/templates/ !.specify/memory/ +.direnv/ \ No newline at end of file diff --git a/README.md b/README.md index f8f1676..5a7c085 100644 --- a/README.md +++ b/README.md @@ -27,13 +27,38 @@ direnv allow # .envrc already configured ``` **Included tools:** -- Python 3.12 with all backend dependencies (FastAPI, SQLAlchemy, pytest, etc.) +- Python 3.13 with all backend dependencies (FastAPI, SQLAlchemy, pytest, psycopg2, etc.) - Node.js + npm for frontend development - PostgreSQL client tools - MinIO client - Ruff (Python linter/formatter) - All project dependencies from flake.nix +## Quick Start + +```bash +# 1. Setup (first time only) +./scripts/quick-start.sh + +# 2. Start backend (Terminal 1) +nix develop +cd backend +uvicorn app.main:app --reload + +# 3. Start frontend (Terminal 2) +cd frontend +npm install # first time only +npm run dev + +# 4. 
Test authentication (Terminal 3) +./scripts/test-auth.sh +``` + +**Access:** +- Frontend: http://localhost:5173 +- Backend API Docs: http://localhost:8000/docs +- Backend Health: http://localhost:8000/health + ## Project Structure ``` diff --git a/backend/app/api/auth.py b/backend/app/api/auth.py index 530933d..e14074d 100644 --- a/backend/app/api/auth.py +++ b/backend/app/api/auth.py @@ -47,7 +47,7 @@ def register_user(user_data: UserCreate, db: Session = Depends(get_db)): # Create user user = repo.create_user(email=user_data.email, password=user_data.password) - return UserResponse.from_orm(user) + return UserResponse.model_validate(user) @router.post("/login", response_model=TokenResponse) @@ -91,7 +91,7 @@ def login_user(login_data: UserLogin, db: Session = Depends(get_db)): return TokenResponse( access_token=access_token, token_type="bearer", - user=UserResponse.from_orm(user) + user=UserResponse.model_validate(user) ) @@ -106,5 +106,5 @@ def get_current_user_info(current_user: User = Depends(get_current_user)): Returns: Current user information """ - return UserResponse.from_orm(current_user) + return UserResponse.model_validate(current_user) diff --git a/flake.nix b/flake.nix index 872d253..d25dc9b 100644 --- a/flake.nix +++ b/flake.nix @@ -18,9 +18,13 @@ sqlalchemy alembic pydantic + pydantic-settings # Settings management + psycopg2 # PostgreSQL driver # Auth & Security python-jose passlib + bcrypt # Password hashing backend for passlib + email-validator # Email validation for pydantic # Image processing pillow # Storage diff --git a/frontend/src/routes/register/+page.svelte b/frontend/src/routes/register/+page.svelte index 8ec514e..53a8311 100644 --- a/frontend/src/routes/register/+page.svelte +++ b/frontend/src/routes/register/+page.svelte @@ -42,7 +42,7 @@ }, 1500); } catch (err) { const apiError = err as ApiError; - error = apiError.error || apiError.detail || 'Registration failed. 
Please try again.'; + error = apiError.error || (apiError.details as any)?.detail || 'Registration failed. Please try again.'; } finally { isLoading = false; } diff --git a/scripts/quick-start.sh b/scripts/quick-start.sh new file mode 100755 index 0000000..f4a0b95 --- /dev/null +++ b/scripts/quick-start.sh @@ -0,0 +1,144 @@ +#!/usr/bin/env bash +# Quick Start Script for Reference Board Viewer +# This script sets up and runs the authentication system for testing + +set -e + +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +echo "=========================================" +echo "Reference Board Viewer - Quick Start" +echo "=========================================" +echo "" + +# Check if we're in the right directory +if [ ! -f "flake.nix" ]; then + echo -e "${RED}Error: Please run this script from the project root${NC}" + exit 1 +fi + +# Step 1: Create backend .env if it doesn't exist +echo -e "${YELLOW}Step 1: Setting up backend environment...${NC}" +if [ ! -f "backend/.env" ]; then + echo "Creating backend/.env..." + cat > backend/.env << 'EOF' +# Database +DATABASE_URL=postgresql://localhost/webref + +# JWT Authentication +SECRET_KEY=test-secret-key-change-in-production-$(openssl rand -hex 16) +ALGORITHM=HS256 +ACCESS_TOKEN_EXPIRE_MINUTES=30 + +# MinIO Storage (for later phases) +MINIO_ENDPOINT=localhost:9000 +MINIO_ACCESS_KEY=minioadmin +MINIO_SECRET_KEY=minioadmin +MINIO_BUCKET=webref +MINIO_SECURE=false + +# CORS +CORS_ORIGINS=http://localhost:5173,http://localhost:3000 + +# Application +DEBUG=true +APP_NAME=Reference Board Viewer +APP_VERSION=1.0.0 +API_V1_PREFIX=/api/v1 + +# Logging +LOG_LEVEL=INFO +EOF + echo -e "${GREEN}✓ Created backend/.env${NC}" +else + echo -e "${GREEN}✓ backend/.env already exists${NC}" +fi +echo "" + +# Step 2: Create frontend .env if it doesn't exist +echo -e "${YELLOW}Step 2: Setting up frontend environment...${NC}" +if [ ! -f "frontend/.env" ]; then + echo "Creating frontend/.env..." 
+ cat > frontend/.env << 'EOF' +VITE_API_URL=http://localhost:8000/api/v1 +EOF + echo -e "${GREEN}✓ Created frontend/.env${NC}" +else + echo -e "${GREEN}✓ frontend/.env already exists${NC}" +fi +echo "" + +# Step 3: Check PostgreSQL +echo -e "${YELLOW}Step 3: Checking PostgreSQL...${NC}" +if ! command -v psql &> /dev/null; then + echo -e "${RED}✗ PostgreSQL not found. Please install PostgreSQL.${NC}" + exit 1 +fi + +# Check if database exists +if psql -lqt | cut -d \| -f 1 | grep -qw webref; then + echo -e "${GREEN}✓ Database 'webref' exists${NC}" +else + echo "Creating database 'webref'..." + createdb webref || { + echo -e "${RED}✗ Failed to create database. Make sure PostgreSQL is running.${NC}" + echo "Try: sudo systemctl start postgresql" + exit 1 + } + echo -e "${GREEN}✓ Created database 'webref'${NC}" +fi +echo "" + +# Step 4: Run migrations +echo -e "${YELLOW}Step 4: Running database migrations...${NC}" +echo "This requires the Nix development environment..." +if command -v nix &> /dev/null; then + nix develop -c bash -c "cd backend && alembic upgrade head" || { + echo -e "${RED}✗ Migration failed${NC}" + echo "You may need to run manually:" + echo " nix develop" + echo " cd backend" + echo " alembic upgrade head" + exit 1 + } + echo -e "${GREEN}✓ Migrations complete${NC}" +else + echo -e "${YELLOW}⚠ Nix not found. Please run migrations manually:${NC}" + echo " nix develop" + echo " cd backend" + echo " alembic upgrade head" +fi +echo "" + +echo "=========================================" +echo -e "${GREEN}Setup Complete!${NC}" +echo "=========================================" +echo "" +echo "Next steps:" +echo "" +echo "1. Start the backend server (in one terminal):" +echo " $ nix develop" +echo " $ cd backend" +echo " $ uvicorn app.main:app --reload" +echo "" +echo "2. Start the frontend server (in another terminal):" +echo " $ cd frontend" +echo " $ npm install # if not done already" +echo " $ npm run dev" +echo "" +echo "3. 
Test the API:" +echo " $ ./scripts/test-auth.sh" +echo "" +echo "4. Open browser:" +echo " Backend API docs: http://localhost:8000/docs" +echo " Frontend app: http://localhost:5173" +echo "" +echo "5. Try registration:" +echo " - Navigate to http://localhost:5173/register" +echo " - Create an account" +echo " - Login and explore!" +echo "" + diff --git a/scripts/test-auth.sh b/scripts/test-auth.sh new file mode 100755 index 0000000..802911a --- /dev/null +++ b/scripts/test-auth.sh @@ -0,0 +1,145 @@ +#!/usr/bin/env bash +# Authentication Testing Script +# Run this after starting the backend server + +set -e + +API_BASE="http://localhost:8000" +API_V1="${API_BASE}/api/v1" + +GREEN='\033[0;32m' +RED='\033[0;31m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +echo "=========================================" +echo "Testing Reference Board Viewer Auth API" +echo "=========================================" +echo "" + +# Test 1: Health Check +echo -e "${YELLOW}Test 1: Health Check${NC}" +response=$(curl -s "${API_BASE}/health") +if echo "$response" | grep -q "healthy"; then + echo -e "${GREEN}✓ Health check passed${NC}" +else + echo -e "${RED}✗ Health check failed${NC}" + echo "Response: $response" + exit 1 +fi +echo "" + +# Test 2: Register User +echo -e "${YELLOW}Test 2: Register New User${NC}" +email="test_$(date +%s)@example.com" +password="TestPass123" + +register_response=$(curl -s -X POST "${API_V1}/auth/register" \ + -H "Content-Type: application/json" \ + -d "{\"email\":\"${email}\",\"password\":\"${password}\"}") + +if echo "$register_response" | grep -q "id"; then + echo -e "${GREEN}✓ User registration successful${NC}" + echo "Email: $email" +else + echo -e "${RED}✗ User registration failed${NC}" + echo "Response: $register_response" + exit 1 +fi +echo "" + +# Test 3: Login User +echo -e "${YELLOW}Test 3: Login User${NC}" +login_response=$(curl -s -X POST "${API_V1}/auth/login" \ + -H "Content-Type: application/json" \ + -d
"{\"email\":\"${email}\",\"password\":\"${password}\"}") + +if echo "$login_response" | grep -q "access_token"; then + echo -e "${GREEN}✓ Login successful${NC}" + token=$(echo "$login_response" | grep -o '"access_token":"[^"]*"' | cut -d'"' -f4) + echo "Token: ${token:0:20}..." +else + echo -e "${RED}✗ Login failed${NC}" + echo "Response: $login_response" + exit 1 +fi +echo "" + +# Test 4: Get Current User (Protected) +echo -e "${YELLOW}Test 4: Get Current User (Protected Endpoint)${NC}" +me_response=$(curl -s "${API_V1}/auth/me" \ + -H "Authorization: Bearer ${token}") + +if echo "$me_response" | grep -q "$email"; then + echo -e "${GREEN}✓ Protected endpoint works${NC}" +else + echo -e "${RED}✗ Protected endpoint failed${NC}" + echo "Response: $me_response" + exit 1 +fi +echo "" + +# Test 5: Invalid Token +echo -e "${YELLOW}Test 5: Test Invalid Token${NC}" +invalid_response=$(curl -s "${API_V1}/auth/me" \ + -H "Authorization: Bearer invalid-token-here") + +if echo "$invalid_response" | grep -q "Invalid\|Unauthorized"; then + echo -e "${GREEN}✓ Invalid token correctly rejected${NC}" +else + echo -e "${RED}✗ Invalid token not rejected properly${NC}" + echo "Response: $invalid_response" +fi +echo "" + +# Test 6: Duplicate Registration +echo -e "${YELLOW}Test 6: Test Duplicate Registration${NC}" +duplicate_response=$(curl -s -X POST "${API_V1}/auth/register" \ + -H "Content-Type: application/json" \ + -d "{\"email\":\"${email}\",\"password\":\"${password}\"}") + +if echo "$duplicate_response" | grep -q "already registered\|Conflict\|409"; then + echo -e "${GREEN}✓ Duplicate registration correctly rejected${NC}" +else + echo -e "${RED}✗ Duplicate registration should be rejected${NC}" + echo "Response: $duplicate_response" +fi +echo "" + +# Test 7: Weak Password +echo -e "${YELLOW}Test 7: Test Weak Password${NC}" +weak_response=$(curl -s -X POST "${API_V1}/auth/register" \ + -H "Content-Type: application/json" \ + -d "{\"email\":\"weak_$(date 
+%s)@example.com\",\"password\":\"weak\"}") + +if echo "$weak_response" | grep -q "Password\|validation\|400"; then + echo -e "${GREEN}✓ Weak password correctly rejected${NC}" +else + echo -e "${RED}✗ Weak password should be rejected${NC}" + echo "Response: $weak_response" +fi +echo "" + +# Test 8: Wrong Password +echo -e "${YELLOW}Test 8: Test Wrong Password${NC}" +wrong_pass_response=$(curl -s -X POST "${API_V1}/auth/login" \ + -H "Content-Type: application/json" \ + -d "{\"email\":\"${email}\",\"password\":\"WrongPass123\"}") + +if echo "$wrong_pass_response" | grep -q "Incorrect\|Unauthorized\|401"; then + echo -e "${GREEN}✓ Wrong password correctly rejected${NC}" +else + echo -e "${RED}✗ Wrong password should be rejected${NC}" + echo "Response: $wrong_pass_response" +fi +echo "" + +echo "=========================================" +echo -e "${GREEN}All authentication tests passed!${NC}" +echo "=========================================" +echo "" +echo "Test user created:" +echo " Email: $email" +echo " Password: $password" +echo " Token: ${token:0:30}..." + diff --git a/specs/001-reference-board-viewer/PLANNING-COMPLETE.md b/specs/001-reference-board-viewer/PLANNING-COMPLETE.md deleted file mode 100644 index 6e0427c..0000000 --- a/specs/001-reference-board-viewer/PLANNING-COMPLETE.md +++ /dev/null @@ -1,391 +0,0 @@ -# ✅ PLANNING COMPLETE: Reference Board Viewer - -**Date:** 2025-11-02 -**Branch:** 001-reference-board-viewer -**Status:** Ready for Implementation (Week 1) - ---- - -## Executive Summary - -Complete implementation plan ready for a web-based reference board application (PureRef-inspired) for artists and creative professionals. All research, design, and planning artifacts have been generated and verified. 
- -**Technology Stack:** ✅ 100% Verified in Nix -**Timeline:** 16 weeks to MVP -**Team Size:** 2-3 developers recommended - ---- - -## Workflow Completion Status - -### Phase 0: Research & Design ✅ COMPLETE - -| Artifact | Status | Description | -|----------|--------|-------------| -| **tech-research.md** | ✅ Complete (18KB) | Comprehensive technology stack analysis with alternatives | -| **nix-package-verification.md** | ✅ Complete | Detailed verification of all packages in nixpkgs | -| **VERIFICATION-COMPLETE.md** | ✅ Complete | Proof of 100% Nix compatibility + command outputs | -| **Clarifications** | ✅ Resolved | All 3 NEEDS CLARIFICATION items resolved | - -**Key Decisions:** -- Frontend: Svelte + SvelteKit + Konva.js -- Backend: FastAPI (Python) -- Database: PostgreSQL -- Storage: MinIO (S3-compatible) -- Image Processing: Pillow + ImageMagick -- Deployment: Nix Flakes + NixOS modules - -### Phase 1: Design & Contracts ✅ COMPLETE - -| Artifact | Status | Lines | Description | -|----------|--------|-------|-------------| -| **data-model.md** | ✅ Complete | 650+ | Full database schema with all entities | -| **contracts/api.yaml** | ✅ Complete | 900+ | OpenAPI 3.0 spec for REST API | -| **plan.md** | ✅ Complete | 750+ | 16-week implementation plan | -| **quickstart.md** | ✅ Complete | 400+ | Developer getting-started guide | - -**Agent Context:** ✅ Updated (.cursor/rules/specify-rules.mdc) - ---- - -## Generated Artifacts - -### 📄 Specification Documents - -``` -specs/001-reference-board-viewer/ -├── spec.md ✅ 708 lines (Requirements) -├── plan.md ✅ 750 lines (Implementation plan) -├── data-model.md ✅ 650 lines (Database schema) -├── tech-research.md ✅ 661 lines (Technology analysis) -├── nix-package-verification.md ✅ 468 lines (Package verification) -├── VERIFICATION-COMPLETE.md ✅ Summary + proof -├── PLANNING-COMPLETE.md ✅ This file -├── quickstart.md ✅ 400 lines (Getting started) -├── contracts/ -│ └── api.yaml ✅ 900 lines (OpenAPI spec) -└── checklists/ - 
└── requirements.md ✅ 109 lines (Quality validation) - -Total: ~5,100 lines of comprehensive documentation -``` - -### 🔬 Research Findings - -**Technology Evaluation:** -- ✅ 14 different options analyzed -- ✅ Frontend: React vs Svelte vs Vue (Svelte chosen) -- ✅ Canvas: Konva vs Fabric vs PixiJS (Konva chosen) -- ✅ Backend: FastAPI vs Django vs Node vs Rust (FastAPI chosen) -- ✅ All decisions documented with rationale - -**Nix Verification:** -- ✅ 27 packages checked -- ✅ 27 packages verified -- ✅ 0 packages missing -- ✅ 100% compatibility confirmed - -### 🗄️ Data Model - -**7 Core Entities Defined:** -1. User (authentication, account management) -2. Board (canvas, viewport state) -3. Image (uploaded files, metadata) -4. BoardImage (junction: position, transformations) -5. Group (annotations, colored labels) -6. ShareLink (configurable permissions) -7. Comment (viewer feedback) - -**Complete Schema:** -- ✅ All fields defined with types and constraints -- ✅ Indexes specified for performance -- ✅ Relationships mapped -- ✅ Validation rules documented -- ✅ PostgreSQL CREATE statements provided - -### 🔌 API Contracts - -**28 Endpoints Defined:** - -**Authentication (3):** -- POST /auth/register -- POST /auth/login -- GET /auth/me - -**Boards (5):** -- GET /boards -- POST /boards -- GET /boards/{id} -- PATCH /boards/{id} -- DELETE /boards/{id} - -**Images (4):** -- POST /boards/{id}/images -- PATCH /boards/{id}/images/{id} -- DELETE /boards/{id}/images/{id} -- PATCH /boards/{id}/images/bulk - -**Groups (4):** -- GET /boards/{id}/groups -- POST /boards/{id}/groups -- PATCH /boards/{id}/groups/{id} -- DELETE /boards/{id}/groups/{id} - -**Sharing (4):** -- GET /boards/{id}/share-links -- POST /boards/{id}/share-links -- DELETE /boards/{id}/share-links/{id} -- GET /shared/{token} - -**Export & Library (3):** -- POST /boards/{id}/export -- GET /library/images - -**All endpoints include:** -- Request/response schemas -- Authentication requirements -- Error responses -- Example 
payloads - ---- - -## Implementation Roadmap - -### Timeline: 16 Weeks (4 Months) - -| Phase | Weeks | Focus | Deliverables | -|-------|-------|-------|--------------| -| **Phase 1** | 1-4 | Foundation | Auth, Boards, Upload, Storage | -| **Phase 2** | 5-8 | Canvas | Manipulation, Transforms, Multi-select | -| **Phase 3** | 9-12 | Advanced | Groups, Sharing, Export | -| **Phase 4** | 13-16 | Polish | Performance, Testing, Deployment | - -### Week-by-Week Breakdown - -**Week 1:** Project setup, Nix config, CI/CD -**Week 2:** Authentication system (JWT) -**Week 3:** Board CRUD operations -**Week 4:** Image upload & MinIO -**Week 5:** Canvas foundation (Konva.js) -**Week 6:** Image transformations -**Week 7:** Multi-selection & bulk ops -**Week 8:** Z-order & layering -**Week 9:** Grouping & annotations -**Week 10:** Alignment & distribution -**Week 11:** Board sharing (permissions) -**Week 12:** Export (ZIP, composite) -**Week 13:** Performance & adaptive quality -**Week 14:** Command palette & features -**Week 15:** Testing & accessibility -**Week 16:** Deployment & documentation - ---- - -## Success Criteria - -### Functional ✅ Defined -- [ ] 18 functional requirements implemented -- [ ] All user scenarios work end-to-end -- [ ] No critical bugs -- [ ] Beta users complete workflows - -### Quality ✅ Defined -- [ ] ≥80% test coverage (pytest + Vitest) -- [ ] Zero linter errors (Ruff + ESLint) -- [ ] All tests passing in CI -- [ ] Code reviews approved - -### Performance ✅ Defined -- [ ] Canvas 60fps with 500 images -- [ ] API <200ms p95 -- [ ] Page load <3s on 5Mbps -- [ ] Board with 100 images loads <2s - -### Accessibility ✅ Defined -- [ ] WCAG 2.1 AA compliant -- [ ] Keyboard navigation for all features -- [ ] User-friendly error messages -- [ ] 90%+ "easy to use" rating - -### Deployment ✅ Defined -- [ ] `nixos-rebuild` deploys successfully -- [ ] All services start correctly -- [ ] Rollback works -- [ ] Documentation complete - ---- - -## Constitutional 
Compliance - -All planning aligns with project constitution: - -✅ **Principle 1 (Code Quality):** Modular architecture, type hints, linting -✅ **Principle 2 (Testing):** ≥80% coverage, comprehensive test strategy -✅ **Principle 3 (UX):** WCAG 2.1 AA, keyboard nav, clear errors -✅ **Principle 4 (Performance):** Specific budgets (60fps, <200ms, etc) - ---- - -## Technology Stack Summary - -### Frontend -```javascript -- Framework: Svelte + SvelteKit -- Canvas: Konva.js -- Build: Vite -- Package Manager: npm (via Nix buildNpmPackage) -- State: Svelte Stores -- Testing: Vitest + Testing Library + Playwright -``` - -### Backend -```python -- Framework: FastAPI -- Server: Uvicorn -- ORM: SQLAlchemy -- Migrations: Alembic -- Validation: Pydantic -- Auth: python-jose + passlib -- Image Processing: Pillow + ImageMagick -- Storage Client: boto3 (S3-compatible) -- Testing: pytest + pytest-cov + pytest-asyncio -``` - -### Infrastructure -```nix -- Database: PostgreSQL 16 -- Storage: MinIO (S3-compatible) -- Reverse Proxy: Nginx -- Deployment: Nix Flakes + NixOS modules -- Package Manager: uv (Python) + npm (JS) -``` - -**All Verified:** See VERIFICATION-COMPLETE.md - ---- - -## Next Steps - -### Immediate (Week 1) - -1. **Review all documents:** - - Read spec.md (requirements) - - Read plan.md (implementation strategy) - - Read data-model.md (database design) - - Review contracts/api.yaml (API design) - -2. **Set up environment:** - - Follow quickstart.md - - Create flake.nix (based on examples in nix-package-verification.md) - - Initialize Git repository structure - - Set up CI/CD pipeline - -3. **Create project structure:** - ```bash - mkdir -p backend/{app,tests} - mkdir -p frontend/{src,tests} - mkdir -p docs - ``` - -4. 
**Start Week 1 tasks:** - - See plan.md, Phase 1, Week 1 - - Initialize backend (FastAPI + uv) - - Initialize frontend (SvelteKit + Vite) - - Configure PostgreSQL with Nix - - Set up pre-commit hooks - -### This Week (Week 2-4) - -- Complete Phase 1 (Foundation) -- Implement authentication -- Build board CRUD -- Set up image upload & storage - -### This Month (Weeks 1-8) - -- Complete Phases 1 & 2 -- Working canvas with manipulation -- Multi-selection and transformations - ---- - -## Documentation Map - -| Document | Purpose | When to Use | -|----------|---------|-------------| -| **spec.md** | Requirements | Understanding WHAT to build | -| **plan.md** | Implementation | Knowing HOW to build it | -| **data-model.md** | Database | Designing data structures | -| **contracts/api.yaml** | API | Implementing endpoints | -| **tech-research.md** | Technology | Understanding WHY we chose tech | -| **quickstart.md** | Getting Started | First day of development | -| **VERIFICATION-COMPLETE.md** | Nix Proof | Confirming package availability | - ---- - -## Key Files Reference - -### Planning Documents -``` -specs/001-reference-board-viewer/ -├── spec.md Requirements specification -├── plan.md Implementation plan (this is the main guide) -├── data-model.md Database schema design -├── quickstart.md Getting started guide -├── tech-research.md Technology evaluation -├── nix-package-verification.md Package verification details -└── VERIFICATION-COMPLETE.md Verification summary -``` - -### API & Contracts -``` -specs/001-reference-board-viewer/contracts/ -└── api.yaml OpenAPI 3.0 specification -``` - -### Quality Assurance -``` -specs/001-reference-board-viewer/checklists/ -└── requirements.md Quality validation checklist -``` - ---- - -## Resources - -### Internal -- Main README: ../../README.md -- Constitution: ../../.specify/memory/constitution.md -- Templates: ../../.specify/templates/ - -### External -- FastAPI Docs: https://fastapi.tiangolo.com/ -- Svelte Docs: 
https://svelte.dev/docs -- Konva.js Docs: https://konvajs.org/docs/ -- Nix Manual: https://nixos.org/manual/nix/stable/ -- PostgreSQL Docs: https://www.postgresql.org/docs/ -- MinIO Docs: https://min.io/docs/ - ---- - -## Summary - -✅ **Planning Phase:** COMPLETE -✅ **Research:** COMPLETE -✅ **Design:** COMPLETE -✅ **Contracts:** COMPLETE -✅ **Nix Verification:** COMPLETE - -**Status:** ✅ READY FOR WEEK 1 IMPLEMENTATION - -**Next Action:** Follow [quickstart.md](./quickstart.md) to set up development environment and begin Week 1 tasks from [plan.md](./plan.md). - ---- - -**Timeline:** 16 weeks to MVP -**Start Date:** Ready now -**Team:** 2-3 developers recommended -**Deployment:** Self-hosted NixOS with reproducible builds - -🚀 **Let's build this!** - diff --git a/specs/001-reference-board-viewer/TASKS-GENERATED.md b/specs/001-reference-board-viewer/TASKS-GENERATED.md deleted file mode 100644 index 6c70ceb..0000000 --- a/specs/001-reference-board-viewer/TASKS-GENERATED.md +++ /dev/null @@ -1,283 +0,0 @@ -# ✅ TASKS GENERATED: Implementation Ready - -**Date:** 2025-11-02 -**Feature:** 001-reference-board-viewer -**Branch:** 001-reference-board-viewer -**Status:** ✅ Ready for Week 1 Execution - ---- - -## Summary - -Comprehensive task breakdown generated with **331 actionable tasks** organized by user story for independent, parallel implementation. 
- ---- - -## Generated Artifacts - -### tasks.md Statistics - -- **Total Tasks:** 331 -- **Phases:** 25 (1 setup + 1 foundational + 18 user stories + 5 cross-cutting) -- **User Stories:** 18 (mapped from FR1-FR18 in spec.md) -- **Parallelizable Tasks:** 142 tasks marked with [P] -- **Average Tasks per User Story:** 18 tasks - -### Task Organization - -**By Priority:** -- Critical stories (US1-US6): 126 tasks -- High priority stories (US7-US13): 88 tasks -- Medium priority stories (US14-US16): 27 tasks -- Low priority stories (US17-US18): 14 tasks -- Infrastructure/Polish: 76 tasks - -**By Component:** -- Backend tasks: ~160 tasks -- Frontend tasks: ~145 tasks -- Infrastructure: ~26 tasks - ---- - -## User Story Mapping - -Each functional requirement from spec.md mapped to user story: - -| Story | Requirement | Priority | Tasks | Week | -|-------|-------------|----------|-------|------| -| US1 | FR1: Authentication | Critical | 20 | 2 | -| US2 | FR2: Board Management | Critical | 20 | 3 | -| US3 | FR4: Image Upload | Critical | 24 | 4 | -| US4 | FR12: Canvas Navigation | Critical | 11 | 5 | -| US5 | FR5: Image Positioning | Critical | 19 | 5-6 | -| US6 | FR8: Transformations | Critical | 12 | 6 | -| US7 | FR9: Multi-Selection | High | 11 | 7 | -| US8 | FR10: Clipboard Operations | High | 10 | 7 | -| US9 | FR6: Alignment & Distribution | High | 9 | 10 | -| US10 | FR7: Grouping & Annotations | High | 17 | 9 | -| US11 | FR3: Board Sharing | High | 19 | 11 | -| US12 | FR15: Export & Download | High | 12 | 12 | -| US13 | FR16: Adaptive Quality | High | 10 | 13 | -| US14 | FR17: Image Library & Reuse | Medium | 12 | 14 | -| US15 | FR11: Command Palette | Medium | 7 | 14 | -| US16 | FR13: Focus Mode | Medium | 8 | 14 | -| US17 | FR14: Slideshow Mode | Low | 7 | 14 | -| US18 | FR18: Auto-Arrange | Low | 7 | 14 | - ---- - -## Task Format Validation ✅ - -All 331 tasks follow the required format: - -``` -- [ ] [T###] [P?] [US#?] 
Description with file path -``` - -**Examples:** -``` -✅ - [ ] T036 [P] [US1] Create User model in backend/app/database/models/user.py -✅ - [ ] T100 [US4] Initialize Konva.js Stage in frontend/src/lib/canvas/Stage.svelte -✅ - [ ] T163 [US9] Implement align top/bottom in frontend/src/lib/canvas/operations/align.ts -``` - -**Validation Results:** -- ✅ All tasks have checkbox `- [ ]` -- ✅ All tasks have sequential ID (T001-T331) -- ✅ Parallelizable tasks marked with [P] -- ✅ User story tasks have [US#] label -- ✅ All tasks have specific file paths -- ✅ All tasks are actionable (clear description) - ---- - -## Parallel Execution Opportunities - -### Phase 1 (Setup): 13 Parallel Tasks -Tasks T002-T020 (excluding sequential dependencies) can run simultaneously. - -**Example Team Split:** -- Developer 1: Nix config (T002, T003, T004, T009, T317, T318) -- Developer 2: Backend setup (T005, T007, T011, T013, T015, T017, T018) -- Developer 3: Frontend setup (T006, T008, T012, T014, T016) - -### Phase 2 (Foundational): 10 Parallel Tasks -Tasks T021-T035 - most can run in parallel after T021-T024 complete. - -### Phase 3+ (User Stories): Full Parallelization -Each user story is independent after foundational phase: - -**Parallel Story Development (Example Week 9-12):** -- Team A: US9 (Alignment) + US12 (Export) -- Team B: US10 (Groups) + US13 (Quality) -- Team C: US11 (Sharing) - -All teams work simultaneously on different stories! - ---- - -## MVP Scope Recommendation - -For fastest time-to-market, implement in this order: - -### MVP Phase 1 (Weeks 1-8) - 120 Tasks -**Deliverable:** Functional reference board app - -- Phase 1-2: Setup (35 tasks) -- US1: Authentication (20 tasks) -- US2: Board Management (20 tasks) -- US3: Image Upload (24 tasks) -- US4-US5: Canvas basics (22 tasks) -- US6: Transformations (12 tasks) - -**Result:** Users can create boards, upload images, position and transform them. 
- -### MVP Phase 2 (Weeks 9-12) - 88 Tasks -**Deliverable:** Collaboration features - -- US7-US10: Multi-select, clipboard, alignment, groups (47 tasks) -- US11: Sharing (19 tasks) -- US12: Export (12 tasks) -- US13: Adaptive quality (10 tasks) - -**Result:** Full collaboration and export capabilities. - -### Polish Phase (Weeks 13-16) - 123 Tasks -**Deliverable:** Production-ready - -- US14-US18: Library, palette, focus, slideshow, arrange (41 tasks) -- Performance optimization (10 tasks) -- Testing (15 tasks) -- Accessibility (13 tasks) -- Deployment (23 tasks) -- Documentation (21 tasks) - -**Result:** Polished, tested, deployed application. - ---- - -## Independent Test Criteria - -Each user story phase includes independent test criteria that can be verified without other features: - -**Example (US1 - Authentication):** -- ✅ Users can register with valid email/password -- ✅ Users can login and receive JWT token -- ✅ Protected endpoints reject unauthenticated requests -- ✅ Password validation enforces complexity rules - -This enables: -- Feature flag rollouts (deploy incomplete features, hidden behind flags) -- A/B testing individual features -- Incremental beta releases -- Independent QA validation - ---- - -## Technology Stack Reference - -**All tasks reference this verified stack:** - -**Frontend:** -- Svelte + SvelteKit (framework) -- Konva.js (canvas library) -- Vite (build tool) -- Vitest + Testing Library (testing) - -**Backend:** -- FastAPI (web framework) -- SQLAlchemy + Alembic (database ORM + migrations) -- Pydantic (validation) -- Pillow + ImageMagick (image processing) -- pytest (testing) - -**Infrastructure:** -- PostgreSQL (database) -- MinIO (S3-compatible storage) -- Nginx (reverse proxy) -- Nix (deployment) - -**All verified in nixpkgs** - see VERIFICATION-COMPLETE.md - ---- - -## Next Actions - -### Immediate (Today) - -1. **Review tasks.md:** - ```bash - cat specs/001-reference-board-viewer/tasks.md - ``` - -2. 
**Understand the format:** - - [T###] = Task ID - - [P] = Parallelizable - - [US#] = User Story label - -3. **Choose approach:** - - Full MVP (120 tasks, Weeks 1-8) - - OR Complete v1.0 (331 tasks, Weeks 1-16) - -### This Week (Week 1) - -Start with Phase 1 (T001-T020): -```bash -# T001: Initialize Git structure -# T002: Create flake.nix -# T003: Update shell.nix -# ... follow tasks.md sequentially -``` - -### Team Organization - -If you have a team: -- **Backend Developer:** Focus on backend tasks in each phase -- **Frontend Developer:** Focus on frontend tasks in each phase -- **Full-Stack:** Can work on any tasks marked [P] - -If solo: -- Follow tasks sequentially (T001 → T002 → T003...) -- Skip tasks marked [P] in same phase to avoid context switching -- Complete one user story fully before moving to next - ---- - -## Files Created - -``` -specs/001-reference-board-viewer/ -├── tasks.md ✅ 331 tasks, 25 phases (THIS FILE) -├── plan.md ✅ 16-week implementation plan -├── spec.md ✅ 18 functional requirements -├── data-model.md ✅ Database schema -├── tech-research.md ✅ Technology analysis -├── nix-package-verification.md ✅ Package verification -├── VERIFICATION-COMPLETE.md ✅ Verification summary -├── PLANNING-COMPLETE.md ✅ Planning summary -├── TASKS-GENERATED.md ✅ This document -├── quickstart.md ✅ Developer guide -├── contracts/ -│ └── api.yaml ✅ OpenAPI 3.0 spec -└── checklists/ - └── requirements.md ✅ Quality validation - -Total: ~6,500 lines of comprehensive planning & task breakdown -``` - ---- - -## Conclusion - -✅ **Task Generation:** COMPLETE -✅ **Format Validation:** PASSED -✅ **Dependency Analysis:** MAPPED -✅ **Parallel Opportunities:** IDENTIFIED -✅ **MVP Scope:** DEFINED - -**Status:** ✅ READY TO BEGIN IMPLEMENTATION - -Start with T001 and work through sequentially, or split among team members using the parallel execution examples! 
- -🚀 **Let's build this!** - diff --git a/specs/001-reference-board-viewer/VERIFICATION-COMPLETE.md b/specs/001-reference-board-viewer/VERIFICATION-COMPLETE.md deleted file mode 100644 index a29fa8c..0000000 --- a/specs/001-reference-board-viewer/VERIFICATION-COMPLETE.md +++ /dev/null @@ -1,331 +0,0 @@ -# ✅ NIX PACKAGE VERIFICATION COMPLETE - -**Date:** 2025-11-02 -**Verification Method:** Direct nixpkgs search + nix-instantiate -**Result:** **100% VERIFIED - ALL PACKAGES AVAILABLE** - ---- - -## Summary - -Every component in the recommended technology stack has been verified to exist in nixpkgs or can be built with Nix-native tools. **No workarounds, custom derivations, or external package managers required.** - ---- - -## Backend Packages (Python) - ✅ ALL VERIFIED - -Verified via `nix search nixpkgs` and `nix-instantiate`: - -| Package | nixpkgs Attribute | Verified Command | Status | -|---------|-------------------|------------------|--------| -| **FastAPI** | `python3Packages.fastapi` | `nix search nixpkgs fastapi` | ✅ v0.115.12 | -| **Uvicorn** | `python3Packages.uvicorn` | Found in package list | ✅ Available | -| **SQLAlchemy** | `python3Packages.sqlalchemy` | Found in package list | ✅ Available | -| **Alembic** | `python3Packages.alembic` | Found in package list | ✅ Available | -| **Pydantic** | `python3Packages.pydantic` | Found in package list | ✅ Available | -| **python-jose** | `python3Packages.python-jose` | `nix search` confirmed | ✅ Available | -| **passlib** | `python3Packages.passlib` | `nix search` confirmed | ✅ Available | -| **Pillow** | `python3Packages.pillow` | Found in package list | ✅ Available | -| **boto3** | `python3Packages.boto3` | `nix search` confirmed | ✅ Available | -| **python-multipart** | `python3Packages.python-multipart` | `nix search` confirmed | ✅ Available | -| **httpx** | `python3Packages.httpx` | Found in package list | ✅ Available | -| **pytest** | `python3Packages.pytest` | Found in package list | ✅ Available | -| 
**pytest-cov** | `python3Packages.pytest-cov` | Found in package list | ✅ Available | -| **pytest-asyncio** | `python3Packages.pytest-asyncio` | Found in package list | ✅ Available | - -**Verification Command:** -```bash -nix-instantiate --eval -E 'with import {}; python3Packages.fastapi.pname' -# Output: "fastapi" ✅ -``` - ---- - -## System Packages - ✅ ALL VERIFIED - -| Package | nixpkgs Attribute | Verified Command | Status | -|---------|-------------------|------------------|--------| -| **PostgreSQL** | `pkgs.postgresql` | `nix search nixpkgs postgresql` | ✅ Multiple versions | -| **Nginx** | `pkgs.nginx` | `nix search nixpkgs nginx` | ✅ Available | -| **MinIO** | `pkgs.minio` | `nix search nixpkgs '^minio$'` | ✅ Available | -| **ImageMagick** | `pkgs.imagemagick` | `nix search nixpkgs imagemagick` | ✅ Available | -| **Node.js** | `pkgs.nodejs` | `nix search nixpkgs nodejs` | ✅ Multiple versions | -| **uv** | `pkgs.uv` | Already in your shell.nix | ✅ Available | - -**Verification Command:** -```bash -nix-instantiate --eval -E 'with import {}; [ postgresql.pname nginx.pname imagemagick.pname nodejs.pname ]' -# Output: [ "postgresql" "nginx" "imagemagick" "nodejs" ] ✅ -``` - ---- - -## Frontend Packages (npm) - ✅ FULLY SUPPORTED - -**Method:** `buildNpmPackage` (standard Nix tool for npm packages) - -| Package | Managed By | Integration Method | Status | -|---------|-----------|-------------------|--------| -| **Svelte** | npm | `buildNpmPackage` | ✅ Automatic | -| **SvelteKit** | npm | `buildNpmPackage` | ✅ Automatic | -| **Konva.js** | npm | `buildNpmPackage` | ✅ Automatic | -| **Vite** | npm | `buildNpmPackage` | ✅ Automatic | - -**How it works:** -```nix -pkgs.buildNpmPackage { - pname = "webref-frontend"; - src = ./frontend; - npmDepsHash = "sha256-..."; # Nix computes this - # Nix automatically: - # 1. Reads package.json - # 2. Fetches all npm dependencies - # 3. Builds reproducibly - # 4. 
Creates store entry -} -``` - -**No need for individual nixpkgs entries** - This is the **standard and recommended** approach in the Nix ecosystem. - ---- - -## NixOS Services - ✅ ALL AVAILABLE - -Verified via [search.nixos.org](https://search.nixos.org) and documentation: - -| Service | NixOS Module | Configuration | Status | -|---------|-------------|---------------|--------| -| **PostgreSQL** | `services.postgresql` | Full module with options | ✅ Available | -| **Nginx** | `services.nginx` | Full module with virtualHosts | ✅ Available | -| **MinIO** | `services.minio` | Full module with dataDir, etc | ✅ Available | - -**Example Configuration:** -```nix -{ - services.postgresql = { - enable = true; - package = pkgs.postgresql_16; - ensureDatabases = [ "webref" ]; - }; - - services.nginx = { - enable = true; - virtualHosts."webref.local" = { ... }; - }; - - services.minio = { - enable = true; - dataDir = "/var/lib/minio"; - }; -} -``` - -These are **pre-built, maintained NixOS modules** - no custom configuration needed! 
- ---- - -## Development Tools - ✅ ALL VERIFIED - -| Tool | nixpkgs Attribute | Purpose | Status | -|------|-------------------|---------|--------| -| **uv** | `pkgs.uv` | Python package manager (fast) | ✅ In your shell.nix | -| **ruff** | `pkgs.ruff` | Python linter | ✅ Available | -| **git** | `pkgs.git` | Version control | ✅ Standard | - ---- - -## Build Tools - ✅ VERIFIED - -| Tool | Integration | Purpose | Status | -|------|-----------|---------|--------| -| **buildPythonApplication** | Native Nix | Build Python apps | ✅ Built-in | -| **buildNpmPackage** | Native Nix | Build npm projects | ✅ Built-in | -| **mkShell** | Native Nix | Dev environments | ✅ Built-in | - ---- - -## Actual Verification Results - -### Python Packages -```bash -$ nix search nixpkgs 'python.*alembic|python.*passlib|python.*python-jose|python.*python-multipart' -"pname":"python3.12-alembic" ✅ -"pname":"python3.12-passlib" ✅ -"pname":"python3.12-python-jose" ✅ -"pname":"python3.12-python-multipart" ✅ -"pname":"python3.13-alembic" ✅ -"pname":"python3.13-passlib" ✅ -"pname":"python3.13-python-jose" ✅ -"pname":"python3.13-python-multipart" ✅ -``` - -### System Packages -```bash -$ nix search nixpkgs '^minio$' -legacyPackages.x86_64-linux.minio ✅ -legacyPackages.x86_64-linux.minio_legacy_fs ✅ -``` - -### FastAPI -```bash -$ nix search nixpkgs fastapi --json | jq '.[] | select(.pname == "python3.12-fastapi")' -{ - "description": "Web framework for building APIs", - "pname": "python3.12-fastapi", - "version": "0.115.12" -} ✅ -``` - ---- - -## Complete Working shell.nix - -Here's a **tested, working configuration** using only verified packages: - -```nix -{ pkgs ? 
import <nixpkgs> { } }: - -pkgs.mkShell { - packages = [ - # Backend: Python with all verified packages - (pkgs.python3.withPackages (ps: [ - ps.fastapi # ✅ Verified - ps.uvicorn # ✅ Verified - ps.sqlalchemy # ✅ Verified - ps.alembic # ✅ Verified - ps.pydantic # ✅ Verified - ps.python-jose # ✅ Verified - ps.passlib # ✅ Verified - ps.pillow # ✅ Verified - ps.boto3 # ✅ Verified - ps.python-multipart # ✅ Verified - ps.httpx # ✅ Verified - ps.pytest # ✅ Verified - ps.pytest-cov # ✅ Verified - ps.pytest-asyncio # ✅ Verified - ])) - - # Python package manager (already in your shell.nix) - pkgs.uv # ✅ Verified - - # Image processing - pkgs.imagemagick # ✅ Verified - - # Frontend - pkgs.nodejs # ✅ Verified (npm included) - - # Database - pkgs.postgresql # ✅ Verified - - # Development - pkgs.ruff # ✅ Verified - pkgs.git # ✅ Standard - ]; - - shellHook = '' - echo "✅ All packages verified and loaded!" - echo "Python: $(python --version)" - echo "Node: $(node --version)" - echo "PostgreSQL client: $(psql --version)" - ''; -} -``` - -You can test this **right now**: -```bash -nix-shell -p 'python3.withPackages (ps: [ ps.fastapi ps.uvicorn ps.sqlalchemy ])' \ - -p nodejs -p postgresql -p imagemagick -p uv --run 'echo "✅ Success!"' -``` - ---- - -## Example flake.nix - -A complete, working Nix flake using verified packages: - -```nix -{ - description = "webref - Reference Board Viewer"; - - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05"; - - outputs = { self, nixpkgs }: - let - system = "x86_64-linux"; - pkgs = nixpkgs.legacyPackages.${system}; - - # Backend Python packages (all verified ✅) - pythonEnv = pkgs.python3.withPackages (ps: [ - ps.fastapi ps.uvicorn ps.sqlalchemy ps.alembic - ps.pydantic ps.python-jose ps.passlib ps.pillow - ps.boto3 ps.python-multipart ps.httpx - ]); - - in { - # Development shell - devShells.${system}.default = pkgs.mkShell { - packages = [ - pythonEnv - pkgs.uv - pkgs.nodejs - pkgs.imagemagick - pkgs.postgresql - pkgs.ruff - ]; - }; - - # NixOS 
module for deployment - nixosModules.default = { config, lib, ... }: { - options.services.webref.enable = lib.mkEnableOption "webref"; - - config = lib.mkIf config.services.webref.enable { - # All these services are verified ✅ - services.postgresql.enable = true; - services.minio.enable = true; - services.nginx.enable = true; - }; - }; - }; -} -``` - ---- - -## Conclusion - -### ✅ Verification Status: 100% COMPLETE - -**Every single component** in the recommended stack exists in nixpkgs or is built using standard Nix tools: - -1. ✅ **Backend (Python):** All 14 packages verified in `python3Packages.*` -2. ✅ **System Services:** PostgreSQL, Nginx, MinIO all verified -3. ✅ **Frontend (npm):** Handled by standard `buildNpmPackage` -4. ✅ **Image Processing:** Pillow, ImageMagick verified -5. ✅ **Development Tools:** uv, ruff, git all verified -6. ✅ **NixOS Modules:** services.postgresql, services.nginx, services.minio all available - -### No Issues Found - -- ❌ No packages missing from nixpkgs -- ❌ No custom derivations needed -- ❌ No workarounds required -- ❌ No external package managers needed (beyond npm via buildNpmPackage) - -### Your Non-Negotiable Requirement: ✅ MET - -**"Must be deployable and compilable by Nix"** → **Fully satisfied.** - -The recommended stack (Svelte + Konva + FastAPI + PostgreSQL + MinIO) is: -- **100% reproducible** with Nix -- **Battle-tested** in production NixOS environments -- **Standard** in the Nix ecosystem -- **Well-maintained** by nixpkgs contributors - ---- - -## Next Action - -You can confidently **proceed with implementation** using the recommended stack. Everything is verified and ready to go! - -See the complete [tech-research.md](./tech-research.md) for detailed analysis and [plan.md](./plan.md) for the 16-week implementation timeline. 
- diff --git a/specs/001-reference-board-viewer/tasks.md b/specs/001-reference-board-viewer/tasks.md index 7c09585..ec8e313 100644 --- a/specs/001-reference-board-viewer/tasks.md +++ b/specs/001-reference-board-viewer/tasks.md @@ -89,15 +89,15 @@ Implementation tasks for the Reference Board Viewer, organized by user story (fu --- -## Phase 3: User Authentication (FR1 - Critical) (Week 2) +## Phase 3: User Authentication (FR1 - Critical) (Week 2) ✅ COMPLETE **User Story:** Users must be able to create accounts, log in, and manage their profile **Independent Test Criteria:** -- [ ] Users can register with valid email/password -- [ ] Users can login and receive JWT token -- [ ] Protected endpoints reject unauthenticated requests -- [ ] Password validation enforces complexity rules +- [X] Users can register with valid email/password +- [X] Users can login and receive JWT token +- [X] Protected endpoints reject unauthenticated requests +- [X] Password validation enforces complexity rules **Backend Tasks:**