diff --git a/.changeset/red-wasps-kick.md b/.changeset/red-wasps-kick.md new file mode 100644 index 00000000..897c3607 --- /dev/null +++ b/.changeset/red-wasps-kick.md @@ -0,0 +1,5 @@ +--- +"varlock": minor +--- + +add new varlock() function for built-in encryption diff --git a/.github/workflows/binary-release.yaml b/.github/workflows/binary-release.yaml index 29bcaefd..9e7e70f1 100644 --- a/.github/workflows/binary-release.yaml +++ b/.github/workflows/binary-release.yaml @@ -12,6 +12,9 @@ on: release: types: [published] +permissions: + contents: read + concurrency: ${{ github.workflow }}-${{ github.ref }} jobs: @@ -24,10 +27,42 @@ jobs: run: | echo "$GITHUB_CONTEXT" + # Build and sign the macOS native binary (cache hit if already built in CI) + build-native-macos: + if: github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') + uses: ./.github/workflows/build-native-macos.yaml + with: + mode: release + version: ${{ github.event_name == 'workflow_dispatch' && inputs.version || github.ref_name }} + artifact-name: native-bin-macos-signed + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Notarize the signed binary for production distribution + notarize-native-macos: + needs: build-native-macos + if: github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') + uses: ./.github/workflows/notarize-native-macos.yaml + with: + source-artifact-name: native-bin-macos-signed + artifact-name: native-bin-macos-release + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Build Rust native binaries for Linux and Windows + build-native-rust: + if: github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') + uses: ./.github/workflows/build-native-rust.yaml + with: + artifact-name: native-bin-rust + release-binaries: + needs: [notarize-native-macos, build-native-rust] # was using github.ref.tag_name, but it seems that when publishing multiple tags at once, it was behaving weirdly if: 
github.event_name == 'workflow_dispatch' || startsWith(github.ref_name, 'varlock@') runs-on: ubuntu-latest + permissions: + contents: write steps: - uses: actions/checkout@v6 - name: Setup Bun @@ -63,6 +98,36 @@ jobs: echo "RELEASE_TAG=varlock@${{ inputs.version }}" >> $GITHUB_ENV echo "RELEASE_VERSION=${{ inputs.version }}" >> $GITHUB_ENV + # Download the signed macOS native binary + - name: Download macOS native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-macos-release + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + - name: Restore native binary execute permission + run: chmod +x packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt + + # Download Rust native binaries for Linux and Windows + - name: Download Linux x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-x64 + path: packages/varlock/native-bins/linux-x64 + - name: Download Linux arm64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-arm64 + path: packages/varlock/native-bins/linux-arm64 + - name: Download Windows x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-win32-x64 + path: packages/varlock/native-bins/win32-x64 + - name: Restore Rust binary execute permissions + run: | + chmod +x packages/varlock/native-bins/linux-x64/varlock-local-encrypt + chmod +x packages/varlock/native-bins/linux-arm64/varlock-local-encrypt + - name: build libs run: bun run build:libs env: diff --git a/.github/workflows/build-native-macos.yaml b/.github/workflows/build-native-macos.yaml new file mode 100644 index 00000000..b9e236a3 --- /dev/null +++ b/.github/workflows/build-native-macos.yaml @@ -0,0 +1,219 @@ +name: Build macOS native binary + +# Reusable workflow that compiles, bundles, and Developer ID signs the +# VarlockEnclave Swift binary on a macOS runner. 
+# +# The Swift .build directory is cached by source hash, so the compile +# step (~minutes) is near-instant on cache hit. The .app bundle wrapping +# (plist, icon, signing) always runs since it varies by mode/version. +# +# Notarization is intentionally NOT included here — it's a separate +# workflow for production releases. + +permissions: + contents: read + +on: + workflow_call: + inputs: + mode: + description: 'Build mode: dev, preview, or release (affects bundle metadata)' + type: string + default: 'preview' + version: + description: 'Bundle version string (e.g. 1.2.3)' + type: string + default: '0.0.0-preview' + artifact-name: + description: 'Name for the uploaded artifact' + type: string + default: 'native-bin-macos' + secrets: + OP_CI_TOKEN: + required: true + +jobs: + build-swift-binary: + runs-on: macos-latest + steps: + - uses: actions/checkout@v6 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + # skip bun dep caching since less likely to hit + + - name: Install node deps + run: bun install + + - name: Enable turborepo build cache + uses: rharkor/caching-for-turbo@v2.3.11 + + # Cache the Swift .build directory so compilation is fast on unchanged source + - name: Compute Swift source hash + id: swift-hash + run: | + HASH=$(find packages/encryption-binary-swift/swift -type f | sort | xargs shasum -a 256 | shasum -a 256 | cut -d' ' -f1) + echo "hash=$HASH" >> $GITHUB_OUTPUT + echo "Swift source hash: $HASH" + + - name: Cache Swift build artifacts + uses: actions/cache@v5 + with: + path: packages/encryption-binary-swift/swift/.build + key: varlock-swift-build-${{ steps.swift-hash.outputs.hash }} + + # Build varlock JS so we can use it to resolve secrets from 1Password + - name: Build varlock libs + run: bun run build:libs + + # Load secrets from 1Password via varlock (scoped to the Swift package) + - name: Load signing secrets + uses: dmno-dev/varlock-action@v1.0.1 + with: + working-directory: packages/encryption-binary-swift + env: + OP_CI_TOKEN: ${{ 
secrets.OP_CI_TOKEN }} + + # Import signing certificate into a temporary keychain + - name: Import signing certificate + run: | + KEYCHAIN_PATH=$RUNNER_TEMP/signing.keychain-db + KEYCHAIN_PASSWORD=$(openssl rand -base64 24) + + echo "$APPLE_CERTIFICATE_BASE64" | base64 --decode > $RUNNER_TEMP/certificate.p12 + + security create-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + security set-keychain-settings -lut 21600 "$KEYCHAIN_PATH" + security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + + security import $RUNNER_TEMP/certificate.p12 \ + -P "$APPLE_CERTIFICATE_PASSWORD" \ + -A -t cert -f pkcs12 \ + -k "$KEYCHAIN_PATH" + + security set-key-partition-list -S apple-tool:,apple:,codesign: \ + -s -k "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" + + security list-keychains -d user -s "$KEYCHAIN_PATH" login.keychain-db + + echo "APPLE_SIGNING_IDENTITY=$APPLE_SIGNING_IDENTITY" >> $GITHUB_ENV + + # Compile (cached), bundle with mode-specific metadata, and sign + - name: Build, bundle, and sign + run: | + bun run --filter @varlock/encryption-binary-swift build:swift \ + -- --mode ${{ inputs.mode }} --version ${{ inputs.version }} --sign "$APPLE_SIGNING_IDENTITY" + + - name: Verify binary + run: | + APP_PATH="packages/varlock/native-bins/darwin/VarlockEnclave.app" + echo "=== App bundle contents ===" + ls -la "$APP_PATH/Contents/MacOS/" + echo "=== Binary architectures ===" + lipo -info "$APP_PATH/Contents/MacOS/varlock-local-encrypt" + echo "=== Code signature ===" + codesign -dvv "$APP_PATH" 2>&1 || true + echo "=== Info.plist ===" + cat "$APP_PATH/Contents/Info.plist" + + # Test the binary (using --no-auth since CI has no biometric) + # Keys are still Secure Enclave-backed, just without user presence requirement + - name: Test binary - status + run: | + BIN="packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt" + echo "=== status ===" + $BIN status + $BIN status | python3 -c "import sys,json; d=json.load(sys.stdin); assert 
d['ok'], 'status not ok'" + + - name: Test binary - SE key lifecycle + encrypt/decrypt roundtrip + run: | + BIN="packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt" + + echo "=== generate-key (--no-auth for CI) ===" + $BIN generate-key --key-id ci-test --no-auth + + echo "=== key-exists ===" + $BIN key-exists --key-id ci-test | python3 -c "import sys,json; d=json.load(sys.stdin); assert d['exists']" + + echo "=== encrypt ===" + PLAINTEXT=$(printf 'hello from macOS CI' | base64) + CIPHERTEXT=$($BIN encrypt --key-id ci-test --data "$PLAINTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['ciphertext'])") + echo "Ciphertext: ${CIPHERTEXT:0:40}..." + + echo "=== decrypt (one-shot, no auth needed) ===" + DECRYPTED=$($BIN decrypt --key-id ci-test --data "$CIPHERTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + echo "Decrypted: $DECRYPTED" + + if [ "$DECRYPTED" != "hello from macOS CI" ]; then + echo "::error::Roundtrip failed! 
Expected 'hello from macOS CI', got '$DECRYPTED'" + exit 1 + fi + + echo "=== delete-key ===" + $BIN delete-key --key-id ci-test + + echo "All macOS binary tests passed" + + - name: Test JS→Swift interop + run: | + BIN="packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt" + + # Generate SE key (no auth for CI) + $BIN generate-key --key-id interop-test --no-auth + + # Get the public key from the SE binary + PUBLIC_KEY=$($BIN generate-key --key-id interop-tmp --no-auth > /dev/null 2>&1; echo "skip") + # Actually, get public key by generating and reading the output + GEN_OUTPUT=$($BIN key-exists --key-id interop-test) + + # Use the SE binary's encrypt to get the public key indirectly: + # generate-key already printed it — let's re-generate to capture it + $BIN delete-key --key-id interop-test > /dev/null + PUBLIC_KEY=$($BIN generate-key --key-id interop-test --no-auth | python3 -c "import sys,json; print(json.load(sys.stdin)['publicKey'])") + echo "SE Public Key: ${PUBLIC_KEY:0:20}..." + + # Encrypt with JS using the SE public key + CIPHERTEXT=$(bun -e " + const { encrypt } = await import('./packages/varlock/src/lib/local-encrypt/crypto.ts'); + const result = await encrypt('$PUBLIC_KEY', 'javascript to secure enclave'); + process.stdout.write(result); + ") + echo "JS Ciphertext: ${CIPHERTEXT:0:40}..." + + # Decrypt with Swift SE binary (proves JS wire format is SE-compatible) + DECRYPTED=$($BIN decrypt --key-id interop-test --data "$CIPHERTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + + if [ "$DECRYPTED" != "javascript to secure enclave" ]; then + echo "::error::JS→Swift interop failed! 
Got: $DECRYPTED" + exit 1 + fi + echo "✓ JS→Swift SE: '$DECRYPTED'" + + # Cleanup + $BIN delete-key --key-id interop-test + echo "All macOS interop tests passed" + + - name: Upload native binary artifact + uses: actions/upload-artifact@v7 + with: + name: ${{ inputs.artifact-name }} + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + retention-days: 7 + + # Cache the signed .app so other jobs (e.g. release-preview) can restore + # it on a Linux runner without needing a macOS build + - name: Cache signed .app bundle + uses: actions/cache/save@v5 + with: + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + key: native-bin-macos-signed-${{ hashFiles('packages/encryption-binary-swift/swift/**') }} + + - name: Cleanup signing keychain + if: always() + run: | + KEYCHAIN_PATH=$RUNNER_TEMP/signing.keychain-db + if [ -f "$KEYCHAIN_PATH" ]; then + security delete-keychain "$KEYCHAIN_PATH" || true + fi + rm -f $RUNNER_TEMP/certificate.p12 diff --git a/.github/workflows/build-native-rust.yaml b/.github/workflows/build-native-rust.yaml new file mode 100644 index 00000000..f0efa84f --- /dev/null +++ b/.github/workflows/build-native-rust.yaml @@ -0,0 +1,211 @@ +name: Build Rust native binaries + +# Reusable workflow that compiles the varlock-local-encrypt Rust binary +# for Linux and Windows targets. +# +# Builds are cached by Cargo.lock + source hash. Each platform builds +# natively on its own runner for maximum compatibility. +# +# Output: native binaries uploaded as artifacts, ready to be bundled +# into the varlock npm package and CLI release archives. 
+ +permissions: + contents: read + +on: + workflow_call: + inputs: + artifact-name: + description: 'Base name for uploaded artifacts (suffixed with platform)' + type: string + default: 'native-bin-rust' + +jobs: + build: + strategy: + matrix: + include: + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + native-bin-subdir: linux-x64 + binary-name: varlock-local-encrypt + - os: ubuntu-24.04-arm + target: aarch64-unknown-linux-gnu + native-bin-subdir: linux-arm64 + binary-name: varlock-local-encrypt + - os: windows-latest + target: x86_64-pc-windows-msvc + native-bin-subdir: win32-x64 + binary-name: varlock-local-encrypt.exe + + runs-on: ${{ matrix.os }} + name: Build ${{ matrix.native-bin-subdir }} + + steps: + - uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.target }} + + # Cache Cargo registry + build artifacts by lockfile hash + - name: Cache Cargo + uses: actions/cache@v5 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + packages/encryption-binary-rust/target + key: rust-${{ matrix.target }}-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock') }}-${{ hashFiles('packages/encryption-binary-rust/src/**') }} + restore-keys: | + rust-${{ matrix.target }}-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock') }}- + rust-${{ matrix.target }}- + + - name: Build release binary + working-directory: packages/encryption-binary-rust + run: cargo build --release --target ${{ matrix.target }} + + - name: Prepare artifact + shell: bash + run: | + ARTIFACT_DIR="native-bins/${{ matrix.native-bin-subdir }}" + mkdir -p "$ARTIFACT_DIR" + cp "packages/encryption-binary-rust/target/${{ matrix.target }}/release/${{ matrix.binary-name }}" "$ARTIFACT_DIR/" + echo "=== Binary info ===" + ls -la "$ARTIFACT_DIR/${{ matrix.binary-name }}" + file "$ARTIFACT_DIR/${{ matrix.binary-name }}" || true + + # Run Rust unit tests + - name: Run unit tests + working-directory: 
packages/encryption-binary-rust + run: cargo test --release --target ${{ matrix.target }} + + # Test the built binary end-to-end (one-shot commands, no biometric) + # Uses python (not python3) for Windows compat; printf for reliable base64 input + - name: Test binary - status + shell: bash + run: | + BIN="native-bins/${{ matrix.native-bin-subdir }}/${{ matrix.binary-name }}" + PY=$(command -v python3 || command -v python) + echo "=== status ===" + $BIN status + $BIN status | $PY -c "import sys,json; d=json.load(sys.stdin); assert d['ok'], 'status not ok'" + + - name: Test binary - key lifecycle + encrypt/decrypt roundtrip + shell: bash + run: | + BIN="native-bins/${{ matrix.native-bin-subdir }}/${{ matrix.binary-name }}" + PY=$(command -v python3 || command -v python) + + echo "=== generate-key ===" + $BIN generate-key --key-id ci-test + $BIN key-exists --key-id ci-test | $PY -c "import sys,json; d=json.load(sys.stdin); assert d['exists']" + + echo "=== encrypt ===" + PLAINTEXT=$($PY -c "import base64; print(base64.b64encode(b'hello from CI').decode())") + CIPHERTEXT=$($BIN encrypt --key-id ci-test --data "$PLAINTEXT" | $PY -c "import sys,json; print(json.load(sys.stdin)['ciphertext'])") + echo "Ciphertext: ${CIPHERTEXT:0:40}..." + + echo "=== decrypt ===" + DECRYPTED=$($BIN decrypt --key-id ci-test --data "$CIPHERTEXT" | $PY -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + echo "Decrypted: $DECRYPTED" + + if [ "$DECRYPTED" != "hello from CI" ]; then + echo "::error::Roundtrip failed! 
Expected 'hello from CI', got '$DECRYPTED'" + exit 1 + fi + + echo "=== delete-key ===" + $BIN delete-key --key-id ci-test + + echo "All binary tests passed" + + - name: Upload artifact + uses: actions/upload-artifact@v7 + with: + name: ${{ inputs.artifact-name }}-${{ matrix.native-bin-subdir }} + path: native-bins/${{ matrix.native-bin-subdir }}/ + retention-days: 7 + + # Cache the built binary so preview releases on other runners can restore it + - name: Cache built binary + uses: actions/cache/save@v5 + with: + path: native-bins/${{ matrix.native-bin-subdir }}/ + key: native-bin-rust-${{ matrix.native-bin-subdir }}-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + + # Cross-platform interop: encrypt with Rust on Linux, decrypt with JS, and vice versa + test-interop: + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + - name: Install deps + run: bun install + + - name: Download Linux x64 binary + uses: actions/download-artifact@v7 + with: + name: ${{ inputs.artifact-name }}-linux-x64 + path: native-bins/linux-x64 + + - name: Fix permissions + run: chmod +x native-bins/linux-x64/varlock-local-encrypt + + - name: Test Rust→JS interop + run: | + BIN=native-bins/linux-x64/varlock-local-encrypt + + # Generate a key with Rust + $BIN generate-key --key-id interop-test + KEY_JSON=$(cat ~/.config/varlock/local-encrypt/keys/interop-test.json) + PUBLIC_KEY=$(echo "$KEY_JSON" | python3 -c "import sys,json; print(json.load(sys.stdin)['publicKey'])") + PRIVATE_KEY=$(echo "$KEY_JSON" | python3 -c "import sys,json; print(json.load(sys.stdin)['protectedPrivateKey'])") + + # Encrypt with Rust + PLAINTEXT_B64=$(echo -n "rust to javascript" | base64) + CIPHERTEXT=$($BIN encrypt --key-id interop-test --data "$PLAINTEXT_B64" | python3 -c "import sys,json; print(json.load(sys.stdin)['ciphertext'])") + + # Decrypt with JS + RESULT=$(bun -e " + const 
{ decrypt } = await import('./packages/varlock/src/lib/local-encrypt/crypto.ts'); + const result = await decrypt('$PRIVATE_KEY', '$PUBLIC_KEY', '$CIPHERTEXT'); + process.stdout.write(result); + ") + + if [ "$RESULT" != "rust to javascript" ]; then + echo "::error::Rust→JS interop failed! Got: $RESULT" + exit 1 + fi + echo "✓ Rust→JS: '$RESULT'" + + - name: Test JS→Rust interop + run: | + BIN=native-bins/linux-x64/varlock-local-encrypt + KEY_JSON=$(cat ~/.config/varlock/local-encrypt/keys/interop-test.json) + PUBLIC_KEY=$(echo "$KEY_JSON" | python3 -c "import sys,json; print(json.load(sys.stdin)['publicKey'])") + + # Encrypt with JS + CIPHERTEXT=$(bun -e " + const { encrypt } = await import('./packages/varlock/src/lib/local-encrypt/crypto.ts'); + const result = await encrypt('$PUBLIC_KEY', 'javascript to rust'); + process.stdout.write(result); + ") + + # Decrypt with Rust + RESULT=$($BIN decrypt --key-id interop-test --data "$CIPHERTEXT" | python3 -c "import sys,json; print(json.load(sys.stdin)['plaintext'])") + + if [ "$RESULT" != "javascript to rust" ]; then + echo "::error::JS→Rust interop failed! Got: $RESULT" + exit 1 + fi + echo "✓ JS→Rust: '$RESULT'" + + # Cleanup + $BIN delete-key --key-id interop-test + echo "✓ All cross-platform interop tests passed" diff --git a/.github/workflows/notarize-native-macos.yaml b/.github/workflows/notarize-native-macos.yaml new file mode 100644 index 00000000..8169a7c2 --- /dev/null +++ b/.github/workflows/notarize-native-macos.yaml @@ -0,0 +1,91 @@ +name: Notarize macOS native binary + +# Reusable workflow that takes an already-signed .app bundle artifact, +# submits it to Apple for notarization, and staples the ticket. +# Requires a macOS runner for xcrun. 
+ +permissions: + contents: read + +on: + workflow_call: + inputs: + source-artifact-name: + description: 'Name of the signed .app artifact to notarize' + type: string + required: true + artifact-name: + description: 'Name for the notarized artifact' + type: string + default: 'native-bin-macos-notarized' + secrets: + OP_CI_TOKEN: + required: true + +jobs: + notarize: + runs-on: macos-latest + steps: + - uses: actions/checkout@v6 + + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + + # skip bun dep caching since less likely to hit + + - name: Install node deps + run: bun install + + - name: Enable turborepo build cache + uses: rharkor/caching-for-turbo@v2.3.11 + + - name: Build varlock libs + run: bun run build:libs + + - name: Download signed .app bundle + uses: actions/download-artifact@v8 + with: + name: ${{ inputs.source-artifact-name }} + path: VarlockEnclave.app + + # Load secrets from 1Password via varlock (scoped to the Swift package) + - name: Load signing secrets + uses: dmno-dev/varlock-action@v1.0.1 + with: + working-directory: packages/encryption-binary-swift + env: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + - name: Notarize and staple + working-directory: packages/encryption-binary-swift + run: | + APP_PATH="$GITHUB_WORKSPACE/VarlockEnclave.app" + + # Create a zip for notarization submission + ditto -c -k --keepParent "$APP_PATH" $RUNNER_TEMP/VarlockEnclave.zip + + # Submit for notarization and wait + xcrun notarytool submit $RUNNER_TEMP/VarlockEnclave.zip \ + --apple-id "$APPLE_ID" \ + --password "$APPLE_APP_PASSWORD" \ + --team-id "$APPLE_TEAM_ID" \ + --wait + + # Staple the notarization ticket to the app bundle + xcrun stapler staple "$APP_PATH" + env: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + - name: Verify notarization + run: | + echo "=== Code signature ===" + codesign -dvv VarlockEnclave.app 2>&1 || true + echo "=== Notarization staple ===" + xcrun stapler validate VarlockEnclave.app + + - name: Upload notarized artifact + uses: 
actions/upload-artifact@v7 + with: + name: ${{ inputs.artifact-name }} + path: VarlockEnclave.app + retention-days: 7 diff --git a/.github/workflows/release-preview.yaml b/.github/workflows/release-preview.yaml deleted file mode 100644 index 5859e5ca..00000000 --- a/.github/workflows/release-preview.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: Release _preview_ packages -on: - pull_request: - push: - branches-ignore: - - main - - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v6 - with: - # by default only the current commit is fetched - # but we need more history to be able to compare to main - # TODO: ideally we would just fetch the history between origin/main and the current commit - fetch-depth: 0 - - name: Setup Bun - uses: oven-sh/setup-bun@v2 - - name: Cache bun dependencies - uses: actions/cache@v5 - with: - path: ~/.bun/install/cache - key: bun-${{ runner.os }}-${{ hashFiles('bun.lock') }} - restore-keys: | - bun-${{ runner.os }}- - - name: Use Node.js 24.x - uses: actions/setup-node@v6 - with: - node-version: "24.x" - - name: Install node deps - run: bun install - - name: Enable turborepo build cache - uses: rharkor/caching-for-turbo@v2.3.11 - - # ------------------------------------------------------------ - - name: Build publishable npm packages - run: bun run build:libs - env: - BUILD_TYPE: preview - # we use a custom script to run `npx pkg-pr-new publish` - # so that we can determine which packages to release - - name: Release preview packages - run: bun run scripts/release-preview.ts diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 66143316..69c4f9dc 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -5,11 +5,40 @@ on: branches: - main +permissions: + contents: read + concurrency: ${{ github.workflow }}-${{ github.ref }} jobs: + # Build and sign the macOS native binary (cache hit if already built in CI) + build-native-macos: + uses: 
./.github/workflows/build-native-macos.yaml + with: + mode: release + artifact-name: native-bin-macos-signed + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Notarize for production npm distribution + notarize-native-macos: + needs: build-native-macos + uses: ./.github/workflows/notarize-native-macos.yaml + with: + source-artifact-name: native-bin-macos-signed + artifact-name: native-bin-macos-npm + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Build Rust native binaries for Linux and Windows + build-native-rust: + uses: ./.github/workflows/build-native-rust.yaml + with: + artifact-name: native-bin-rust + release: name: Release + needs: [notarize-native-macos, build-native-rust] runs-on: ubuntu-latest permissions: id-token: write # Required for OIDC @@ -45,6 +74,36 @@ jobs: - name: Update npm run: npm install -g npm@latest + # Download signed macOS native binary so it's included in the npm package + - name: Download macOS native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-macos-npm + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + - name: Restore native binary execute permission + run: chmod +x packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt + + # Download Rust native binaries for Linux and Windows + - name: Download Linux x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-x64 + path: packages/varlock/native-bins/linux-x64 + - name: Download Linux arm64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-linux-arm64 + path: packages/varlock/native-bins/linux-arm64 + - name: Download Windows x64 native binary + uses: actions/download-artifact@v8 + with: + name: native-bin-rust-win32-x64 + path: packages/varlock/native-bins/win32-x64 + - name: Restore Rust binary execute permissions + run: | + chmod +x packages/varlock/native-bins/linux-x64/varlock-local-encrypt + chmod +x 
packages/varlock/native-bins/linux-arm64/varlock-local-encrypt + # ------------------------------------------------------------ - name: Create Release Pull Request or Publish to npm id: changesets diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index dddb41ba..89c6be41 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -2,19 +2,26 @@ name: CI test suite on: pull_request: push: + branches: [main] +permissions: + contents: read jobs: - build: + build-and-test: runs-on: ubuntu-latest - + outputs: + swift-changed: ${{ steps.check-swift.outputs.changed }} + rust-changed: ${{ steps.check-rust.outputs.changed }} steps: - uses: actions/checkout@v6 + with: + fetch-depth: 0 - name: Setup Bun uses: oven-sh/setup-bun@v2 + # this caching step is kind of a wash - # downloading the cache adds a few seconds - # and then installing is a bit faster + # downloading the cache adds a few seconds and then installing is a bit faster - name: Cache bun dependencies uses: actions/cache@v5 with: @@ -28,22 +35,10 @@ jobs: node-version: "24.x" - name: Install js deps (w/ bun) run: bun install - - name: Dogfood varlock-action - id: varlock - uses: dmno-dev/varlock-action@v1.0.1 - with: - working-directory: smoke-tests/smoke-test-basic - show-summary: 'false' - fail-on-error: 'true' - output-format: 'json' - - name: Verify varlock-action output - run: | - test -n "${{ steps.varlock.outputs.json-env }}" - echo "Varlock action output is present" - name: Enable turborepo build cache uses: rharkor/caching-for-turbo@v2.3.11 - # ------------------------------------------------------------ + # lint, build, tests --------------------------------- - name: ESLint run: bun run lint - name: TypeScript type check @@ -52,3 +47,156 @@ jobs: run: bun run build:libs - name: Run tests run: bun run test:ci + + # Check if native binary source changed (used to gate native builds) + - name: Check for Swift source changes + id: check-swift + run: | + if git diff 
--name-only origin/main...HEAD | grep -q '^packages/encryption-binary-swift/'; then + echo "changed=true" >> $GITHUB_OUTPUT + else + echo "changed=false" >> $GITHUB_OUTPUT + fi + - name: Check for Rust source changes + id: check-rust + run: | + if git diff --name-only origin/main...HEAD | grep -q '^packages/encryption-binary-rust/'; then + echo "changed=true" >> $GITHUB_OUTPUT + else + echo "changed=false" >> $GITHUB_OUTPUT + fi + + # Build + sign the macOS native binary if Swift source changed (warms the cache) + # this must be done on a mac-os runner + build-native-macos: + needs: build-and-test + if: needs.build-and-test.outputs.swift-changed == 'true' + uses: ./.github/workflows/build-native-macos.yaml + with: + artifact-name: native-bin-macos-ci + secrets: + OP_CI_TOKEN: ${{ secrets.OP_CI_TOKEN }} + + # Build Rust native binaries if source changed (warms the cache) + build-native-rust: + needs: build-and-test + if: needs.build-and-test.outputs.rust-changed == 'true' + uses: ./.github/workflows/build-native-rust.yaml + with: + artifact-name: native-bin-rust-ci + + # Publish preview packages via pkg-pr-new + release-preview-packages: + needs: [build-and-test, build-native-macos, build-native-rust] + # Run even if native builds were skipped (source unchanged), but not if anything failed. Skip on main. 
+ if: always() && !failure() && !cancelled() && github.ref_name != 'main' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + - name: Cache bun dependencies + uses: actions/cache@v5 + with: + path: ~/.bun/install/cache + key: bun-${{ runner.os }}-${{ hashFiles('bun.lock') }} + restore-keys: | + bun-${{ runner.os }}- + - name: Use Node.js 24.x + uses: actions/setup-node@v6 + with: + node-version: "24.x" + - name: Install node deps + run: bun install + - name: Enable turborepo build cache + uses: rharkor/caching-for-turbo@v2.3.11 + + # Determine which packages will be preview-released + - name: Check release packages + id: check-release + run: bun run scripts/check-release-packages.ts + + # Get signed macOS .app if varlock is being released + # If the macOS build ran this run (swift changed), download the artifact directly + # Otherwise, restore from cross-run cache + - name: Download macOS native binary (from this run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.swift-changed == 'true' + uses: actions/download-artifact@v8 + with: + name: native-bin-macos-ci + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + - name: Restore cached macOS native binary (from prior run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.swift-changed != 'true' + uses: actions/cache/restore@v5 + with: + path: packages/varlock/native-bins/darwin/VarlockEnclave.app + key: native-bin-macos-signed-${{ hashFiles('packages/encryption-binary-swift/swift/**') }} + - name: Verify and fix native binary permissions + if: steps.check-release.outputs.includes-varlock == 'true' + run: | + BINARY=packages/varlock/native-bins/darwin/VarlockEnclave.app/Contents/MacOS/varlock-local-encrypt + if [ ! 
-f "$BINARY" ]; then + echo "::error::macOS native binary not found — cannot publish varlock preview without it" + exit 1 + fi + chmod +x "$BINARY" + + # Get Rust native binaries if varlock is being released + # If the Rust build ran this run, download the artifacts; otherwise restore from cache + - name: Download Rust binaries (from this run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed == 'true' + uses: actions/download-artifact@v8 + with: + pattern: native-bin-rust-ci-* + path: packages/varlock/native-bins/ + merge-multiple: false + # Flatten: download-artifact creates subdirs per artifact name, but we need linux-x64/ etc. + - name: Flatten Rust artifact directories (from this run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed == 'true' + run: | + cd packages/varlock/native-bins + for dir in native-bin-rust-ci-*/; do + subdir=$(echo "$dir" | sed 's/native-bin-rust-ci-//' | sed 's/\///') + mv "$dir" "$subdir" 2>/dev/null || true + done + + - name: Restore cached Rust binaries (from prior run) + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + run: | + CACHE_KEY_SUFFIX="${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }}" + for SUBDIR in linux-x64 linux-arm64 win32-x64; do + echo "Restoring native-bin-rust-$SUBDIR..." 
+ done + - uses: actions/cache/restore@v5 + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + with: + path: packages/varlock/native-bins/linux-x64/ + key: native-bin-rust-linux-x64-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + - uses: actions/cache/restore@v5 + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + with: + path: packages/varlock/native-bins/linux-arm64/ + key: native-bin-rust-linux-arm64-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + - uses: actions/cache/restore@v5 + if: steps.check-release.outputs.includes-varlock == 'true' && needs.build-and-test.outputs.rust-changed != 'true' + with: + path: packages/varlock/native-bins/win32-x64/ + key: native-bin-rust-win32-x64-${{ hashFiles('packages/encryption-binary-rust/Cargo.lock', 'packages/encryption-binary-rust/src/**') }} + + - name: Fix Rust binary permissions + if: steps.check-release.outputs.includes-varlock == 'true' + run: | + for BIN in packages/varlock/native-bins/linux-*/varlock-local-encrypt; do + [ -f "$BIN" ] && chmod +x "$BIN" && echo "Fixed: $BIN" + done + + - name: Build publishable npm packages + run: bun run build:libs + env: + BUILD_TYPE: preview + - name: Release preview packages + run: bun run scripts/release-preview.ts + env: + RELEASE_PACKAGES: ${{ steps.check-release.outputs.packages }} diff --git a/.gitignore b/.gitignore index b523d384..4f30189e 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,5 @@ smoke-tests/pnpm-lock.yaml framework-tests/.packed framework-tests/.test-projects .magent +.claude/worktrees/ eslint-output.txt diff --git a/bun.lock b/bun.lock index 6246abf5..e47f7845 100644 --- a/bun.lock +++ b/bun.lock @@ -9,11 +9,10 @@ "@cloudflare/vite-plugin": "^1.30.1", "@eslint/js": "^10.0.1", "@stylistic/eslint-plugin": 
"^5.9.0", + "@types/node": "catalog:", "@typescript-eslint/eslint-plugin": "^8.56.1", "@typescript-eslint/parser": "^8.56.1", "@varlock/changeset-changelog": "workspace:*", - "@varlock/cloudflare-integration": "workspace:*", - "@varlock/keepass-plugin": "workspace:*", "@varlock/tsconfig": "workspace:*", "eslint": "^10.0.2", "eslint-plugin-es-x": "^9.5.0", @@ -25,6 +24,7 @@ "turbo": "^2.8.12", "typescript": "catalog:", "typescript-eslint": "^8.56.1", + "varlock": "workspace:*", }, }, "packages/changeset-changelog": { @@ -44,6 +44,14 @@ "vitest": "catalog:", }, }, + "packages/encryption-binary-swift": { + "name": "@varlock/encryption-binary-swift", + "version": "0.0.1", + "devDependencies": { + "@varlock/1password-plugin": "workspace:*", + "varlock": "workspace:*", + }, + }, "packages/env-spec-parser": { "name": "@env-spec/parser", "version": "0.2.0", @@ -116,7 +124,7 @@ }, "packages/integrations/nextjs": { "name": "@varlock/nextjs-integration", - "version": "0.3.2", + "version": "0.3.3", "devDependencies": { "@types/node": "catalog:", "tsup": "catalog:", @@ -130,7 +138,7 @@ }, "packages/integrations/vite": { "name": "@varlock/vite-integration", - "version": "0.2.9", + "version": "0.2.10", "devDependencies": { "@types/node": "catalog:", "ast-matcher": "^1.2.0", @@ -148,7 +156,7 @@ }, "packages/plugins/1password": { "name": "@varlock/1password-plugin", - "version": "0.3.2", + "version": "0.3.3", "devDependencies": { "@1password/sdk": "0.4.1-beta.1", "@1password/sdk-core": "0.4.1-beta.1", @@ -355,7 +363,7 @@ }, "packages/varlock": { "name": "varlock", - "version": "0.7.1", + "version": "0.7.2", "bin": { "varlock": "./bin/cli.js", }, @@ -1306,6 +1314,8 @@ "@varlock/dashlane-plugin": ["@varlock/dashlane-plugin@workspace:packages/plugins/dashlane"], + "@varlock/encryption-binary-swift": ["@varlock/encryption-binary-swift@workspace:packages/encryption-binary-swift"], + "@varlock/expo-integration": ["@varlock/expo-integration@workspace:packages/integrations/expo"], 
"@varlock/google-secret-manager-plugin": ["@varlock/google-secret-manager-plugin@workspace:packages/plugins/google-secret-manager"], diff --git a/eslint.config.mjs b/eslint.config.mjs index 324e9337..c2e14f55 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -54,6 +54,7 @@ export default tseslint.config( '**/out', '**/next-env.d.ts', '.magent', + '.claude', 'framework-tests/.test-projects', 'framework-tests/.packed', ], @@ -159,6 +160,7 @@ export default tseslint.config( }, }, { + // allow console.log in some scripts/tests/etc files: [ 'scripts/**', 'ignore/**', @@ -168,6 +170,8 @@ export default tseslint.config( 'packages/varlock/scripts/**', 'smoke-tests/**', 'framework-tests/**', + 'packages/encryption-binary-swift/scripts/**', + 'packages/encryption-binary-rust/scripts/**', ], rules: { 'no-console': 0, @@ -179,22 +183,6 @@ export default tseslint.config( '@typescript-eslint/no-require-imports': 0, }, }, - { - // plugin files use triple-slash directives for the `plugin` global type - // which is injected at runtime by varlock via globalThis - files: [ - 'smoke-tests/**/plugins/**', - 'packages/varlock/src/env-graph/test/plugins/**', - ], - languageOptions: { - globals: { - plugin: 'readonly', - }, - }, - rules: { - '@typescript-eslint/triple-slash-reference': 0, - }, - }, { // these files use build-time globals declared in globals.d.ts files: [ diff --git a/package.json b/package.json index 0cefe334..6e284b2c 100644 --- a/package.json +++ b/package.json @@ -18,8 +18,9 @@ "test:ci": "turbo test:ci --filter=\"!smoke-test-*\"", "smoke-test": "cd smoke-tests && bun run test", "test:frameworks": "cd framework-tests && bun run test", - "typecheck": "turbo typecheck --filter=\"!@varlock/website\" --filter=\"!smoke-test-*\" --filter=\"!varlock-docs-mcp\"", - "check": "bun run lint && bun run typecheck && bun run build:libs && bun run test:ci", + "typecheck": "tsc --noEmit", + "typecheck:all": "turbo typecheck --filter=\"!@varlock/website\" 
--filter=\"!smoke-test-*\" --filter=\"!varlock-docs-mcp\"", + "check": "bun run lint && bun run typecheck:all && bun run build:libs && bun run test:ci", "dev": "turbo run dev --concurrency=40 --parallel --filter=\"!smoke-test-*\"", "lint": "eslint .", "lint:fix": "eslint . --fix", @@ -34,12 +35,11 @@ "@cloudflare/vite-plugin": "^1.30.1", "@eslint/js": "^10.0.1", "@stylistic/eslint-plugin": "^5.9.0", + "@types/node": "catalog:", "@typescript-eslint/eslint-plugin": "^8.56.1", "@typescript-eslint/parser": "^8.56.1", "@varlock/changeset-changelog": "workspace:*", "@varlock/tsconfig": "workspace:*", - "@varlock/cloudflare-integration": "workspace:*", - "@varlock/keepass-plugin": "workspace:*", "eslint": "^10.0.2", "eslint-plugin-es-x": "^9.5.0", "eslint-plugin-fix-disabled-rules": "^0.0.2", @@ -49,7 +49,8 @@ "globals": "^17.3.0", "turbo": "^2.8.12", "typescript": "catalog:", - "typescript-eslint": "^8.56.1" + "typescript-eslint": "^8.56.1", + "varlock": "workspace:*" }, "packageManager": "bun@1.3.11", "engines": { diff --git a/packages/encryption-binary-rust/.gitignore b/packages/encryption-binary-rust/.gitignore new file mode 100644 index 00000000..2f7896d1 --- /dev/null +++ b/packages/encryption-binary-rust/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/packages/encryption-binary-rust/Cargo.lock b/packages/encryption-binary-rust/Cargo.lock new file mode 100644 index 00000000..c7f66d7d --- /dev/null +++ b/packages/encryption-binary-rust/Cargo.lock @@ -0,0 +1,767 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" 
+dependencies = [ + "generic-array", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core", + "typenum", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "der" +version = "0.7.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "hkdf", + "pem-rfc7468", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "ff" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "generic-array" +version = "0.14.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "itoa" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "libc" +version = "0.2.184" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "nix" +version = "0.29.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +dependencies = [ + "bitflags", + "cfg-if", + "cfg_aliases", + "libc", + "memoffset", +] + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "proc-macro2" +version = 
"1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" 
+version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "typenum" +version = 
"1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "varlock-local-encrypt" +version = "0.1.0" +dependencies = [ + "aes-gcm", + "base64", + "elliptic-curve", + "hkdf", + "hmac", + "libc", + "nix", + "p256", + "rand", + "serde", + "serde_json", + "sha2", + "windows", +] + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "windows" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +dependencies = [ + "windows-core", + "windows-targets", +] + +[[package]] +name = "windows-core" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result", + "windows-strings", + "windows-targets", +] + +[[package]] +name = "windows-implement" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name 
= "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "zerocopy" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/packages/encryption-binary-rust/Cargo.toml 
b/packages/encryption-binary-rust/Cargo.toml new file mode 100644 index 00000000..893f8b3a --- /dev/null +++ b/packages/encryption-binary-rust/Cargo.toml @@ -0,0 +1,56 @@ +[package] +name = "varlock-local-encrypt" +version = "0.1.0" +edition = "2021" +description = "Cross-platform local encryption binary for Varlock (Windows/Linux)" + +[[bin]] +name = "varlock-local-encrypt" +path = "src/main.rs" + +[dependencies] +# ECIES crypto (pure Rust, no OpenSSL) +p256 = { version = "0.13", features = ["ecdh", "pkcs8"] } +elliptic-curve = { version = "0.13", features = ["sec1", "pkcs8"] } +hkdf = "0.12" +hmac = "0.12" +sha2 = "0.10" +aes-gcm = "0.10" +rand = "0.8" + +# Serialization +serde = { version = "1", features = ["derive"] } +serde_json = "1" +base64 = "0.22" + +# Signal handling (Unix) +[target.'cfg(unix)'.dependencies] +libc = "0.2" + +# Platform — Linux (nix for peer credentials in IPC) +[target.'cfg(target_os = "linux")'.dependencies] +nix = { version = "0.29", features = ["process", "socket", "user", "fs"] } + +# Platform — Windows +[target.'cfg(target_os = "windows")'.dependencies] +windows = { version = "0.58", features = [ + # DPAPI + "Win32_Security_Cryptography", + "Win32_Security_Credentials", + "Win32_System_Memory", + # Named pipes + "Win32_System_Pipes", + "Win32_Storage_FileSystem", + "Win32_Foundation", + "Win32_System_IO", + # Windows Hello (UserConsentVerifier) + "Security_Credentials_UI", + "Foundation", +] } + +[profile.release] +strip = true +lto = true +opt-level = "z" # optimize for size +codegen-units = 1 +panic = "abort" diff --git a/packages/encryption-binary-rust/package.json b/packages/encryption-binary-rust/package.json new file mode 100644 index 00000000..2732a443 --- /dev/null +++ b/packages/encryption-binary-rust/package.json @@ -0,0 +1,12 @@ +{ + "name": "@varlock/encryption-binary-rust", + "version": "0.0.0", + "private": true, + "scripts": { + "build:current": "bun run scripts/build-rust.ts", + "build:linux-x64": "bun run 
scripts/build-rust.ts --target x86_64-unknown-linux-gnu", + "build:linux-arm64": "bun run scripts/build-rust.ts --target aarch64-unknown-linux-gnu", + "build:windows-x64": "bun run scripts/build-rust.ts --target x86_64-pc-windows-msvc", + "build:windows-arm64": "bun run scripts/build-rust.ts --target aarch64-pc-windows-msvc" + } +} diff --git a/packages/encryption-binary-rust/scripts/build-rust.ts b/packages/encryption-binary-rust/scripts/build-rust.ts new file mode 100644 index 00000000..512cee12 --- /dev/null +++ b/packages/encryption-binary-rust/scripts/build-rust.ts @@ -0,0 +1,113 @@ +#!/usr/bin/env bun + +/** + * Build script for the varlock-local-encrypt Rust binary. + * + * Usage: + * bun run scripts/build-rust.ts # build for current platform + * bun run scripts/build-rust.ts --target x86_64-unknown-linux-gnu + * bun run scripts/build-rust.ts --target x86_64-pc-windows-msvc + * + * The binary is placed in packages/varlock/native-bins/[-]/ + */ + +import { execSync } from 'node:child_process'; +import path from 'node:path'; +import fs from 'node:fs'; + +// ── CLI args ──────────────────────────────────────────────────── + +const args = process.argv.slice(2); + +function getArg(flag: string): string | undefined { + const idx = args.indexOf(flag); + return idx >= 0 ? args[idx + 1] : undefined; +} + +const target = getArg('--target'); + +// ── Paths ─────────────────────────────────────────────────────── + +const rustDir = path.resolve(import.meta.dir, '..'); +const varlockPkgDir = path.resolve(import.meta.dir, '..', '..', 'varlock'); +const binaryName = process.platform === 'win32' && !target?.includes('linux') + ? 'varlock-local-encrypt.exe' + : 'varlock-local-encrypt'; + +/** + * Map a Rust target triple to the native-bins subdirectory name. 
+ */ +function getOutputSubdir(rustTarget?: string): string { + if (!rustTarget) { + // Current platform + if (process.platform === 'darwin') return 'darwin'; + if (process.platform === 'win32') return `win32-${process.arch}`; + return `${process.platform}-${process.arch}`; + } + + // Parse Rust target triple: --[-] + const parts = rustTarget.split('-'); + const arch = parts[0]; + const os = parts[2]; + + let nodeArch = arch; + if (arch === 'x86_64') nodeArch = 'x64'; + else if (arch === 'aarch64') nodeArch = 'arm64'; + + if (os === 'linux') return `linux-${nodeArch}`; + if (os === 'windows') return `win32-${nodeArch}`; + if (os === 'darwin' || os === 'apple') return 'darwin'; + return `${os}-${nodeArch}`; +} + +function run(cmd: string, opts?: { cwd?: string }) { + console.log(`> ${cmd}`); + execSync(cmd, { stdio: 'inherit', cwd: opts?.cwd ?? rustDir }); +} + +// ── Build ─────────────────────────────────────────────────────── + +const buildArgs = ['cargo', 'build', '--release']; +if (target) { + buildArgs.push('--target', target); +} + +run(buildArgs.join(' ')); + +// ── Copy to native-bins ───────────────────────────────────────── + +const subdir = getOutputSubdir(target); +const outputDir = path.join(varlockPkgDir, 'native-bins', subdir); +fs.mkdirSync(outputDir, { recursive: true }); + +// Find the built binary +let sourceBinary: string; +if (target) { + const targetBinaryName = target.includes('windows') + ? 
'varlock-local-encrypt.exe' + : 'varlock-local-encrypt'; + sourceBinary = path.join(rustDir, 'target', target, 'release', targetBinaryName); +} else { + sourceBinary = path.join(rustDir, 'target', 'release', binaryName); +} + +if (!fs.existsSync(sourceBinary)) { + console.error(`Build succeeded but binary not found at: ${sourceBinary}`); + process.exit(1); +} + +const destBinary = path.join(outputDir, binaryName); +fs.copyFileSync(sourceBinary, destBinary); + +// Ensure executable +if (process.platform !== 'win32') { + fs.chmodSync(destBinary, 0o755); +} + +const stats = fs.statSync(destBinary); +const sizeKB = Math.round(stats.size / 1024); + +console.log(`\nBuilt: ${destBinary}`); +console.log(`Size: ${sizeKB} KB`); +console.log(`Platform: ${subdir}`); +console.log('Done!'); diff --git a/packages/encryption-binary-rust/src/crypto.rs b/packages/encryption-binary-rust/src/crypto.rs new file mode 100644 index 00000000..a6287457 --- /dev/null +++ b/packages/encryption-binary-rust/src/crypto.rs @@ -0,0 +1,252 @@ +//! ECIES implementation matching the JS (crypto.ts) and Swift (SecureEnclaveManager.swift) schemes. +//! +//! Wire-compatible payload format: +//! version(1) | ephemeralPubKey(65) | nonce(12) | ciphertext(N) | tag(16) +//! +//! Crypto: +//! - P-256 ECDH key agreement +//! - HKDF-SHA256 (salt: "varlock-ecies-v1", info: ephemeralPub || recipientPub) +//! 
- AES-256-GCM with random 12-byte nonce + +use aes_gcm::{ + aead::{Aead, KeyInit}, + Aes256Gcm, Nonce, +}; +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use elliptic_curve::sec1::{FromEncodedPoint, ToEncodedPoint}; +use hkdf::Hkdf; +use elliptic_curve::pkcs8::{DecodePrivateKey, EncodePrivateKey}; +use p256::{ + ecdh::EphemeralSecret, + elliptic_curve::rand_core::OsRng, + PublicKey, SecretKey, +}; +use sha2::Sha256; + +const PAYLOAD_VERSION: u8 = 0x01; +const HKDF_SALT: &[u8] = b"varlock-ecies-v1"; +const PUBLIC_KEY_LENGTH: usize = 65; // uncompressed P-256: 0x04 || x(32) || y(32) +const NONCE_LENGTH: usize = 12; +const TAG_LENGTH: usize = 16; +const HEADER_LENGTH: usize = 1 + PUBLIC_KEY_LENGTH + NONCE_LENGTH; + +/// A P-256 key pair with base64-encoded components. +pub struct KeyPair { + /// Base64-encoded uncompressed P-256 public key (65 bytes raw) + pub public_key: String, + /// Base64-encoded PKCS8 DER private key + pub private_key: String, +} + +/// Generate a new P-256 key pair. +/// +/// Returns the public key as uncompressed SEC1 (65 bytes, base64) and +/// the private key as PKCS8 DER (base64), matching the JS/Swift format. +pub fn generate_key_pair() -> Result { + let secret_key = SecretKey::random(&mut OsRng); + + // Public key: uncompressed SEC1 encoding (65 bytes) + let public_key_point = secret_key.public_key().to_encoded_point(false); + let public_key_bytes = public_key_point.as_bytes(); + + // Private key: PKCS8 DER encoding + let private_key_pkcs8 = secret_key + .to_pkcs8_der() + .map_err(|e| format!("Failed to encode private key as PKCS8: {e}"))?; + + Ok(KeyPair { + public_key: BASE64.encode(public_key_bytes), + private_key: BASE64.encode(private_key_pkcs8.as_bytes()), + }) +} + +/// Encrypt plaintext using ECIES with the recipient's public key. +/// +/// Only needs the public key — no private key or biometric auth required. +/// Returns base64-encoded ciphertext payload. 
pub fn encrypt(public_key_base64: &str, plaintext: &[u8]) -> Result<String, String> {
+ // AES-GCM appends tag to ciphertext — split for wire format + let ct_len = ciphertext_with_tag.len() - TAG_LENGTH; + let ciphertext = &ciphertext_with_tag[..ct_len]; + let tag = &ciphertext_with_tag[ct_len..]; + + // Assemble payload: version(1) | ephemeralPub(65) | nonce(12) | ciphertext(N) | tag(16) + let mut payload = Vec::with_capacity(HEADER_LENGTH + ciphertext.len() + TAG_LENGTH); + payload.push(PAYLOAD_VERSION); + payload.extend_from_slice(ephemeral_pub_raw); + payload.extend_from_slice(&nonce_bytes); + payload.extend_from_slice(ciphertext); + payload.extend_from_slice(tag); + + Ok(BASE64.encode(&payload)) +} + +/// Decrypt ciphertext using ECIES with the recipient's private key. +/// +/// `private_key_base64` is PKCS8 DER, `public_key_base64` is uncompressed SEC1. +/// `ciphertext_base64` is the base64-encoded wire-format payload. +/// Returns decrypted plaintext bytes. +pub fn decrypt( + private_key_base64: &str, + public_key_base64: &str, + ciphertext_base64: &str, +) -> Result, String> { + let payload = BASE64 + .decode(ciphertext_base64) + .map_err(|e| format!("Invalid ciphertext base64: {e}"))?; + + if payload.len() < HEADER_LENGTH + TAG_LENGTH { + return Err("Payload too short".into()); + } + + // Parse payload + let version = payload[0]; + if version != PAYLOAD_VERSION { + return Err(format!("Unsupported payload version: {version}")); + } + + let ephemeral_pub_raw = &payload[1..1 + PUBLIC_KEY_LENGTH]; + let nonce_bytes = &payload[1 + PUBLIC_KEY_LENGTH..HEADER_LENGTH]; + let ciphertext_and_tag = &payload[HEADER_LENGTH..]; + + if ciphertext_and_tag.len() < TAG_LENGTH { + return Err("Payload too short for tag".into()); + } + + // Import private key from PKCS8 DER + let private_key_der = BASE64 + .decode(private_key_base64) + .map_err(|e| format!("Invalid private key base64: {e}"))?; + let secret_key = SecretKey::from_pkcs8_der(&private_key_der) + .map_err(|e| format!("Invalid PKCS8 private key: {e}"))?; + + // Import ephemeral public key + let 
ephemeral_point = p256::EncodedPoint::from_bytes(ephemeral_pub_raw) + .map_err(|e| format!("Invalid ephemeral public key: {e}"))?; + let ephemeral_pub = PublicKey::from_encoded_point(&ephemeral_point) + .into_option() + .ok_or("Invalid ephemeral P-256 point")?; + + // Recipient public key bytes for HKDF info + let recipient_pub_bytes = BASE64 + .decode(public_key_base64) + .map_err(|e| format!("Invalid public key base64: {e}"))?; + + // ECDH: recipient private × ephemeral public → shared secret + let shared_secret = p256::ecdh::diffie_hellman( + secret_key.to_nonzero_scalar(), + ephemeral_pub.as_affine(), + ); + let shared_secret_bytes = shared_secret.raw_secret_bytes(); + + // HKDF-SHA256 → AES-256 key (must match encrypt side) + let mut info = Vec::with_capacity(PUBLIC_KEY_LENGTH * 2); + info.extend_from_slice(ephemeral_pub_raw); + info.extend_from_slice(&recipient_pub_bytes); + + let hk = Hkdf::::new(Some(HKDF_SALT), shared_secret_bytes); + let mut aes_key = [0u8; 32]; + hk.expand(&info, &mut aes_key) + .map_err(|e| format!("HKDF expand failed: {e}"))?; + + // AES-256-GCM decrypt + // aes-gcm expects ciphertext || tag concatenated (same as wire format after header) + let cipher = Aes256Gcm::new_from_slice(&aes_key) + .map_err(|e| format!("AES key init failed: {e}"))?; + let nonce = Nonce::from_slice(nonce_bytes); + + let plaintext = cipher + .decrypt(nonce, ciphertext_and_tag) + .map_err(|_| "Decryption failed: invalid ciphertext or key".to_string())?; + + Ok(plaintext) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_roundtrip() { + let kp = generate_key_pair().unwrap(); + let plaintext = b"hello world"; + let encrypted = encrypt(&kp.public_key, plaintext).unwrap(); + let decrypted = decrypt(&kp.private_key, &kp.public_key, &encrypted).unwrap(); + assert_eq!(decrypted, plaintext); + } + + #[test] + fn test_payload_format() { + let kp = generate_key_pair().unwrap(); + let encrypted = encrypt(&kp.public_key, b"test").unwrap(); + let payload = 
BASE64.decode(&encrypted).unwrap(); + + // Check version byte + assert_eq!(payload[0], PAYLOAD_VERSION); + // Check total minimum length: 1 + 65 + 12 + 0 + 16 = 94 + assert!(payload.len() >= HEADER_LENGTH + TAG_LENGTH); + // Check ephemeral public key starts with 0x04 (uncompressed) + assert_eq!(payload[1], 0x04); + } + + #[test] + fn test_different_keys_cannot_decrypt() { + let kp1 = generate_key_pair().unwrap(); + let kp2 = generate_key_pair().unwrap(); + let encrypted = encrypt(&kp1.public_key, b"secret").unwrap(); + let result = decrypt(&kp2.private_key, &kp2.public_key, &encrypted); + assert!(result.is_err()); + } +} diff --git a/packages/encryption-binary-rust/src/daemon.rs b/packages/encryption-binary-rust/src/daemon.rs new file mode 100644 index 00000000..d01c9b03 --- /dev/null +++ b/packages/encryption-binary-rust/src/daemon.rs @@ -0,0 +1,326 @@ +//! Daemon mode — long-lived process with IPC server, session management, and auto-shutdown. +//! +//! Matches the Swift daemon's behavior: +//! - Accepts connections over Unix socket (Linux) or named pipe (Windows) +//! - Handles: decrypt, encrypt, ping, invalidate-session +//! - On Windows with Hello: requires biometric before first decrypt per session +//! - No prompt-secret (no GUI on Linux — handled by terminal prompt in TS) +//! - Auto-shutdown after 30 minutes of inactivity +//! - Session invalidation on SIGTERM/SIGINT + +use crate::crypto; +use crate::ipc::{IpcServer, MessageHandler}; +use crate::key_store; +use serde_json::{json, Value}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::{Arc, Mutex}; +use std::time::{Duration, Instant}; + +const DEFAULT_KEY_ID: &str = "varlock-default"; +const DAEMON_INACTIVITY_TIMEOUT: Duration = Duration::from_secs(30 * 60); // 30 minutes +const SESSION_TIMEOUT: Duration = Duration::from_secs(5 * 60); // 5 minutes per session + +/// Per-TTY session state. +struct SessionManager { + /// Map of TTY IDs to their session creation time. 
+ /// Sessions expire after SESSION_TIMEOUT. + active_sessions: std::collections::HashMap, + /// Last IPC activity timestamp for daemon timeout. + last_activity: Instant, + /// Whether biometric verification is available on this platform. + biometric_available: bool, +} + +impl SessionManager { + fn new() -> Self { + let info = key_store::get_platform_info(); + Self { + active_sessions: std::collections::HashMap::new(), + last_activity: Instant::now(), + biometric_available: info.biometric_available, + } + } + + fn note_activity(&mut self) { + self.last_activity = Instant::now(); + } + + fn is_session_warm(&self, tty_id: &Option) -> bool { + let key = tty_id.as_deref().unwrap_or("__no_tty__"); + match self.active_sessions.get(key) { + Some(created_at) => created_at.elapsed() < SESSION_TIMEOUT, + None => false, + } + } + + fn mark_session_warm(&mut self, tty_id: &Option) { + let key = tty_id.as_deref().unwrap_or("__no_tty__").to_string(); + self.active_sessions.insert(key, Instant::now()); + } + + fn invalidate_all(&mut self) { + self.active_sessions.clear(); + } + + #[allow(dead_code)] + fn has_any_sessions(&self) -> bool { + self.active_sessions.values().any(|t| t.elapsed() < SESSION_TIMEOUT) + } + + fn is_timed_out(&self) -> bool { + self.last_activity.elapsed() > DAEMON_INACTIVITY_TIMEOUT + } + + /// Whether the next decrypt should require biometric verification. + fn needs_biometric(&self, tty_id: &Option) -> bool { + self.biometric_available && !self.is_session_warm(tty_id) + } +} + +/// Run the daemon. 
+pub fn run_daemon(socket_path: &str, pid_path: Option<&str>) -> Result<(), String> { + // Write PID file + if let Some(pid_path) = pid_path { + if let Some(parent) = std::path::Path::new(pid_path).parent() { + let _ = std::fs::create_dir_all(parent); + } + std::fs::write(pid_path, std::process::id().to_string()) + .map_err(|e| format!("Failed to write PID file: {e}"))?; + } + + let session_manager = Arc::new(Mutex::new(SessionManager::new())); + let mut server = IpcServer::new(socket_path); + + // Activity callback + let sm_activity = session_manager.clone(); + server.set_activity_callback(move || { + if let Ok(mut sm) = sm_activity.lock() { + sm.note_activity(); + } + }); + + // Message handler + let sm_handler = session_manager.clone(); + let handler: MessageHandler = Box::new(move |message: Value, tty_id: Option| { + let action = message + .get("action") + .and_then(|v| v.as_str()) + .unwrap_or(""); + + match action { + "decrypt" => handle_decrypt(&message, &tty_id, &sm_handler), + "encrypt" => handle_encrypt(&message), + "ping" => handle_ping(&tty_id, &sm_handler), + "invalidate-session" => handle_invalidate(&sm_handler), + _ => json!({"error": format!("Unknown action: {action}")}), + } + }); + server.set_message_handler(handler); + + let running = server.running_flag(); + + // Signal handling + let pid_path_owned = pid_path.map(|s| s.to_string()); + + #[cfg(unix)] + { + let _ = ctrlc_handler(running.clone()); + } + + // Inactivity timeout checker + session expiry cleanup + let sm_timeout = session_manager.clone(); + let running_timeout = running.clone(); + std::thread::spawn(move || { + loop { + std::thread::sleep(Duration::from_secs(60)); + if !running_timeout.load(Ordering::SeqCst) { + break; + } + if let Ok(mut sm) = sm_timeout.lock() { + // Clean up expired sessions + sm.active_sessions.retain(|_, created_at| { + created_at.elapsed() < SESSION_TIMEOUT + }); + + if sm.is_timed_out() { + running_timeout.store(false, Ordering::SeqCst); + break; + } + } + } + 
}); + + // Print ready message (matches Swift daemon format) + let ready = json!({ + "ready": true, + "pid": std::process::id(), + "socketPath": socket_path, + }); + println!("{}", ready); + use std::io::Write; + let _ = std::io::stdout().flush(); + + // Start server (blocks) + let result = server.start(); + + // Cleanup + if let Some(pp) = &pid_path_owned { + let _ = std::fs::remove_file(pp); + } + + result +} + +// ── Message handlers ───────────────────────────────────────────── + +fn handle_decrypt( + message: &Value, + tty_id: &Option, + sm: &Arc>, +) -> Value { + let payload = match message.get("payload") { + Some(p) => p, + None => return json!({"error": "Missing payload"}), + }; + + let ciphertext_b64 = match payload.get("ciphertext").and_then(|v| v.as_str()) { + Some(ct) => ct, + None => return json!({"error": "Missing or invalid ciphertext in payload"}), + }; + + let key_id = payload + .get("keyId") + .and_then(|v| v.as_str()) + .unwrap_or(DEFAULT_KEY_ID); + + // Check if biometric verification is needed + let needs_bio = sm.lock().map(|s| s.needs_biometric(tty_id)).unwrap_or(false); + + if needs_bio { + match verify_user_presence() { + Ok(true) => {} // Verified — proceed + Ok(false) => return json!({"error": "User verification cancelled"}), + Err(e) => return json!({"error": format!("Biometric verification failed: {e}")}), + } + } + + // Load key and decrypt + match key_store::load_key(key_id) { + Ok((private_key_der, public_key_b64)) => { + let private_key_b64 = base64::Engine::encode( + &base64::engine::general_purpose::STANDARD, + &private_key_der, + ); + + match crypto::decrypt(&private_key_b64, &public_key_b64, ciphertext_b64) { + Ok(plaintext_bytes) => { + match String::from_utf8(plaintext_bytes) { + Ok(plaintext) => { + // Mark session as warm + if let Ok(mut session) = sm.lock() { + session.mark_session_warm(tty_id); + } + json!({"result": plaintext}) + } + Err(_) => json!({"error": "Decrypted data is not valid UTF-8"}), + } + } + Err(e) => 
json!({"error": e}), + } + } + Err(e) => json!({"error": e}), + } +} + +fn handle_encrypt(message: &Value) -> Value { + let payload = match message.get("payload") { + Some(p) => p, + None => return json!({"error": "Missing payload"}), + }; + + let plaintext = match payload.get("plaintext").and_then(|v| v.as_str()) { + Some(pt) => pt, + None => return json!({"error": "Missing plaintext in payload"}), + }; + + let key_id = payload + .get("keyId") + .and_then(|v| v.as_str()) + .unwrap_or(DEFAULT_KEY_ID); + + match key_store::load_public_key(key_id) { + Ok(public_key_b64) => match crypto::encrypt(&public_key_b64, plaintext.as_bytes()) { + Ok(ciphertext) => json!({"result": ciphertext}), + Err(e) => json!({"error": e}), + }, + Err(e) => json!({"error": e}), + } +} + +fn handle_ping(tty_id: &Option, sm: &Arc>) -> Value { + let session_warm = sm + .lock() + .map(|s| s.is_session_warm(tty_id)) + .unwrap_or(false); + + json!({ + "result": { + "pong": true, + "sessionWarm": session_warm, + "ttyId": tty_id.as_deref().unwrap_or(""), + } + }) +} + +fn handle_invalidate(sm: &Arc>) -> Value { + if let Ok(mut session) = sm.lock() { + session.invalidate_all(); + } + json!({"result": "all sessions invalidated"}) +} + +// ── Biometric verification ─────────────────────────────────────── + +/// Verify user presence using platform-specific biometric. +/// Returns Ok(true) if verified, Ok(false) if cancelled. 
+fn verify_user_presence() -> Result { + #[cfg(target_os = "windows")] + { + crate::key_store::windows_hello::verify_user("Varlock needs to decrypt your secrets") + } + + #[cfg(not(target_os = "windows"))] + { + // No biometric on Linux — sessions are always warm + Ok(true) + } +} + +// ── Signal handling ────────────────────────────────────────────── + +#[cfg(unix)] +fn ctrlc_handler(running: Arc) -> Result<(), String> { + unsafe { + libc::signal(libc::SIGTERM, signal_handler as *const () as libc::sighandler_t); + libc::signal(libc::SIGINT, signal_handler as *const () as libc::sighandler_t); + } + + RUNNING_FLAG + .lock() + .map_err(|e| format!("Failed to set signal handler: {e}"))? + .replace(running); + + Ok(()) +} + +#[cfg(unix)] +static RUNNING_FLAG: std::sync::Mutex>> = std::sync::Mutex::new(None); + +#[cfg(unix)] +extern "C" fn signal_handler(_sig: libc::c_int) { + if let Ok(guard) = RUNNING_FLAG.lock() { + if let Some(ref running) = *guard { + running.store(false, Ordering::SeqCst); + } + } +} diff --git a/packages/encryption-binary-rust/src/ipc.rs b/packages/encryption-binary-rust/src/ipc.rs new file mode 100644 index 00000000..9657cc29 --- /dev/null +++ b/packages/encryption-binary-rust/src/ipc.rs @@ -0,0 +1,456 @@ +//! IPC server for the daemon mode. +//! +//! Protocol: Length-prefixed JSON over Unix domain socket (Linux) or named pipe (Windows). +//! +//! [4 bytes: UInt32 LE message length] +//! [N bytes: UTF-8 JSON] +//! +//! Request: { "id": "...", "action": "...", "payload": { ... } } +//! Response: { "id": "...", "result": ... } or { "id": "...", "error": "..." } + +use serde_json::Value; +use std::io::{Read, Write}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; + +#[cfg(unix)] +use std::os::unix::net::{UnixListener, UnixStream}; + +const MAX_MESSAGE_SIZE: u32 = 10_000_000; // 10MB safety limit + +/// Message handler callback type. 
pub type MessageHandler = Box<dyn Fn(Value, Option<String>) -> Value + Send + Sync>;
self.running.clone(); + + // Get peer TTY identity + let tty_id = get_peer_tty_id(&stream); + + std::thread::spawn(move || { + handle_client(stream, handler, on_activity, running, tty_id); + }); + } + Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => { + // No pending connection — sleep briefly and retry + std::thread::sleep(std::time::Duration::from_millis(50)); + } + Err(e) => { + if self.running.load(Ordering::SeqCst) { + eprintln!("Accept error: {e}"); + } + break; + } + } + } + + // Cleanup + let _ = std::fs::remove_file(&self.socket_path); + Ok(()) + } + + /// Start the IPC server on Windows using named pipes. + /// + /// Named pipes work with Node.js `net.connect()` out of the box — + /// the TS daemon client's `socket.connect(pipePath)` just works. + #[cfg(windows)] + pub fn start(&self) -> Result<(), String> { + use std::io::{Read, Write}; + use windows::Win32::Foundation::{CloseHandle, HANDLE, INVALID_HANDLE_VALUE}; + use windows::Win32::Storage::FileSystem::{ + ReadFile, WriteFile, FlushFileBuffers, + }; + use windows::Win32::System::Pipes::{ + ConnectNamedPipe, CreateNamedPipeW, DisconnectNamedPipe, + PIPE_ACCESS_DUPLEX, PIPE_TYPE_BYTE, PIPE_READMODE_BYTE, PIPE_WAIT, + }; + use windows::core::HSTRING; + + self.running.store(true, Ordering::SeqCst); + + let pipe_name = HSTRING::from(&self.socket_path); + + while self.running.load(Ordering::SeqCst) { + // Create a new named pipe instance for each client + let pipe_handle = unsafe { + CreateNamedPipeW( + &pipe_name, + PIPE_ACCESS_DUPLEX, + PIPE_TYPE_BYTE | PIPE_READMODE_BYTE | PIPE_WAIT, + 10, // max instances + 65536, // out buffer + 65536, // in buffer + 0, // default timeout + None, // default security + ) + }.map_err(|e| format!("CreateNamedPipe failed: {e}"))?; + + if pipe_handle == INVALID_HANDLE_VALUE { + return Err("CreateNamedPipe returned invalid handle".into()); + } + + // Wait for a client to connect (blocking) + let connected = unsafe { ConnectNamedPipe(pipe_handle, None) }; + if 
connected.is_err() { + // ERROR_PIPE_CONNECTED means client connected between Create and Connect — that's OK + let err = unsafe { windows::Win32::Foundation::GetLastError() }; + if err != windows::Win32::Foundation::ERROR_PIPE_CONNECTED { + unsafe { let _ = CloseHandle(pipe_handle); } + if !self.running.load(Ordering::SeqCst) { + break; + } + continue; + } + } + + if let Some(cb) = &self.on_activity { + cb(); + } + + let handler = self.message_handler.clone(); + let on_activity = self.on_activity.clone(); + let running = self.running.clone(); + + // Windows doesn't have per-connection TTY identity like Unix + // Use the pipe client process ID if available + let tty_id: Option = None; + + std::thread::spawn(move || { + handle_windows_client(pipe_handle, handler, on_activity, running, tty_id); + unsafe { + let _ = DisconnectNamedPipe(pipe_handle); + let _ = CloseHandle(pipe_handle); + } + }); + } + + Ok(()) + } + + pub fn stop(&self) { + self.running.store(false, Ordering::SeqCst); + } +} + +impl Drop for IpcServer { + fn drop(&mut self) { + self.stop(); + let _ = std::fs::remove_file(&self.socket_path); + } +} + +// ── Client handling ────────────────────────────────────────────── + +#[cfg(unix)] +fn handle_client( + mut stream: UnixStream, + handler: Option>, + on_activity: Option>, + running: Arc, + tty_id: Option, +) { + // Set blocking for reads + let _ = stream.set_nonblocking(false); + let _ = stream.set_read_timeout(Some(std::time::Duration::from_secs(300))); + + while running.load(Ordering::SeqCst) { + // Read 4-byte length prefix (little-endian) + let mut len_buf = [0u8; 4]; + match stream.read_exact(&mut len_buf) { + Ok(()) => {} + Err(_) => break, // Connection closed or error + } + + let msg_len = u32::from_le_bytes(len_buf); + if msg_len == 0 || msg_len > MAX_MESSAGE_SIZE { + break; + } + + // Read message body + let mut msg_buf = vec![0u8; msg_len as usize]; + match stream.read_exact(&mut msg_buf) { + Ok(()) => {} + Err(_) => break, + } + + // Parse 
JSON + let message: Value = match serde_json::from_slice(&msg_buf) { + Ok(v) => v, + Err(_) => { + let _ = send_response(&mut stream, None, &serde_json::json!({"error": "Invalid JSON"})); + continue; + } + }; + + if let Some(cb) = &on_activity { + cb(); + } + + // Handle message + let id = message.get("id").and_then(|v| v.as_str()).map(|s| s.to_string()); + + let response = if let Some(ref handler) = handler { + handler(message, tty_id.clone()) + } else { + serde_json::json!({"error": "No handler"}) + }; + + if send_response(&mut stream, id.as_deref(), &response).is_err() { + break; + } + } +} + +fn send_response(stream: &mut impl Write, id: Option<&str>, response: &Value) -> Result<(), String> { + let mut full_response = response.clone(); + if let (Some(id), Some(obj)) = (id, full_response.as_object_mut()) { + obj.insert("id".to_string(), Value::String(id.to_string())); + } + + let json_bytes = serde_json::to_vec(&full_response) + .map_err(|e| format!("Serialization failed: {e}"))?; + + let len = (json_bytes.len() as u32).to_le_bytes(); + stream.write_all(&len).map_err(|e| format!("Write failed: {e}"))?; + stream.write_all(&json_bytes).map_err(|e| format!("Write failed: {e}"))?; + stream.flush().map_err(|e| format!("Flush failed: {e}"))?; + + Ok(()) +} + +// ── Peer TTY identity (Linux) ──────────────────────────────────── + +#[cfg(target_os = "linux")] +fn get_peer_tty_id(stream: &UnixStream) -> Option { + use nix::sys::socket::{getsockopt, sockopt::PeerCredentials}; + use std::os::unix::io::AsRawFd; + + let fd = stream.as_raw_fd(); + let creds = getsockopt(fd, PeerCredentials).ok()?; + let pid = creds.pid(); + + if pid <= 0 { + return None; + } + + // Read the process's controlling terminal from /proc + get_tty_for_pid(pid as u32) +} + +#[cfg(target_os = "linux")] +fn get_tty_for_pid(pid: u32) -> Option { + // Read /proc//stat to get the tty_nr field (field 7, 0-indexed 6) + let stat = std::fs::read_to_string(format!("/proc/{pid}/stat")).ok()?; + + // The stat 
line format is: pid (comm) state ppid pgrp session tty_nr ... + // comm can contain spaces and parens, so find the last ')' first + let after_comm = stat.rfind(')')? + 2; + let fields: Vec<&str> = stat[after_comm..].split_whitespace().collect(); + + // After the closing paren: state(0) ppid(1) pgrp(2) session(3) tty_nr(4) + let tty_nr: u32 = fields.get(4)?.parse().ok()?; + if tty_nr == 0 { + return None; // No controlling tty + } + + // Get the session leader PID (field 3 after comm) + let session_pid: u32 = fields.get(3)?.parse().ok()?; + + // Get session leader start time for uniqueness + let start_time = get_process_start_time(session_pid).unwrap_or(0); + + // Convert tty_nr to a name (major:minor) + let major = (tty_nr >> 8) & 0xff; + let minor = (tty_nr & 0xff) | ((tty_nr >> 12) & 0xfff00); + let tty_name = format!("tty{major}:{minor}"); + + Some(format!("{tty_name}:{start_time}")) +} + +#[cfg(target_os = "linux")] +fn get_process_start_time(pid: u32) -> Option { + let stat = std::fs::read_to_string(format!("/proc/{pid}/stat")).ok()?; + let after_comm = stat.rfind(')')? 
+ 2; + let fields: Vec<&str> = stat[after_comm..].split_whitespace().collect(); + // Field 19 after comm is starttime (in clock ticks since boot) + fields.get(19)?.parse().ok() +} + +#[cfg(not(any(target_os = "linux", target_os = "windows")))] +fn get_peer_tty_id(_stream: &UnixStream) -> Option { + None +} + +// ── Windows named pipe client handling ─────────────────────────── + +#[cfg(windows)] +fn handle_windows_client( + pipe: windows::Win32::Foundation::HANDLE, + handler: Option>, + on_activity: Option>, + running: Arc, + tty_id: Option, +) { + use windows::Win32::Storage::FileSystem::{ReadFile, WriteFile, FlushFileBuffers}; + + while running.load(Ordering::SeqCst) { + // Read 4-byte length prefix + let mut len_buf = [0u8; 4]; + let mut bytes_read = 0u32; + let ok = unsafe { + ReadFile(pipe, Some(&mut len_buf), Some(&mut bytes_read), None) + }; + if ok.is_err() || bytes_read != 4 { + break; + } + + let msg_len = u32::from_le_bytes(len_buf); + if msg_len == 0 || msg_len > MAX_MESSAGE_SIZE { + break; + } + + // Read message body + let mut msg_buf = vec![0u8; msg_len as usize]; + let mut total_read = 0u32; + while (total_read as usize) < msg_buf.len() { + let mut chunk_read = 0u32; + let ok = unsafe { + ReadFile( + pipe, + Some(&mut msg_buf[total_read as usize..]), + Some(&mut chunk_read), + None, + ) + }; + if ok.is_err() || chunk_read == 0 { + return; + } + total_read += chunk_read; + } + + // Parse JSON + let message: Value = match serde_json::from_slice(&msg_buf) { + Ok(v) => v, + Err(_) => { + let _ = send_windows_response(pipe, None, &serde_json::json!({"error": "Invalid JSON"})); + continue; + } + }; + + if let Some(cb) = &on_activity { + cb(); + } + + let id = message.get("id").and_then(|v| v.as_str()).map(|s| s.to_string()); + + let response = if let Some(ref handler) = handler { + handler(message, tty_id.clone()) + } else { + serde_json::json!({"error": "No handler"}) + }; + + if send_windows_response(pipe, id.as_deref(), &response).is_err() { + break; + 
} + } +} + +#[cfg(windows)] +fn send_windows_response( + pipe: windows::Win32::Foundation::HANDLE, + id: Option<&str>, + response: &Value, +) -> Result<(), String> { + use windows::Win32::Storage::FileSystem::{WriteFile, FlushFileBuffers}; + + let mut full_response = response.clone(); + if let (Some(id), Some(obj)) = (id, full_response.as_object_mut()) { + obj.insert("id".to_string(), Value::String(id.to_string())); + } + + let json_bytes = serde_json::to_vec(&full_response) + .map_err(|e| format!("Serialization failed: {e}"))?; + + let len = (json_bytes.len() as u32).to_le_bytes(); + + let mut written = 0u32; + unsafe { + WriteFile(pipe, Some(&len), Some(&mut written), None) + .map_err(|e| format!("Write failed: {e}"))?; + WriteFile(pipe, Some(&json_bytes), Some(&mut written), None) + .map_err(|e| format!("Write failed: {e}"))?; + let _ = FlushFileBuffers(pipe); + } + + Ok(()) +} diff --git a/packages/encryption-binary-rust/src/key_store/linux.rs b/packages/encryption-binary-rust/src/key_store/linux.rs new file mode 100644 index 00000000..7b47192a --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/linux.rs @@ -0,0 +1,287 @@ +//! Linux key protection using TPM2 seal/unseal via tpm2-tools. +//! +//! Strategy: +//! 1. Create a TPM2 Storage Root Key (SRK) under the owner hierarchy +//! (deterministic — same template always produces the same key) +//! 2. Seal the PKCS8 private key under the SRK using tpm2_create +//! 3. Store the sealed public/private blobs on disk +//! 4. To decrypt: recreate SRK, load sealed object, unseal +//! +//! The sealed blob can ONLY be unsealed by the same machine's TPM chip. +//! Survives reboots, survives logout — the TPM hardware is the anchor. +//! +//! Requirements: +//! - TPM 2.0 hardware (present on most machines since ~2018) +//! - tpm2-tools installed (tpm2_createprimary, tpm2_create, tpm2_load, tpm2_unseal) +//! - Access to /dev/tpmrm0 (user must be in 'tss' group or have udev rule) +//! +//! 
Fallback: If TPM2 is not available, falls back to file-based (plaintext) storage.
+
+use std::io::Write;
+use std::process::Command;
+
+/// Detailed result of TPM2 availability check.
+pub enum Tpm2Status {
+    /// TPM2 is available and ready to use
+    Available,
+    /// tpm2-tools not installed
+    ToolsNotInstalled,
+    /// /dev/tpmrm0 doesn't exist (no TPM hardware or not enabled in BIOS)
+    NoDevice,
+    /// /dev/tpmrm0 exists but not accessible (permission issue)
+    PermissionDenied,
+    /// TPM device exists but SRK creation failed (TPM in bad state?)
+    SrkFailed(String),
+}
+
+/// Check if TPM2 is available and usable.
+///
+/// Checks, in order: tpm2-tools on PATH, /dev/tpmrm0 present, then a real
+/// probe (a transient SRK creation) to confirm the device is actually usable.
+pub fn check_tpm2_status() -> Tpm2Status {
+    // Check if tpm2_createprimary is in PATH
+    if Command::new("which")
+        .arg("tpm2_createprimary")
+        .output()
+        .map(|o| !o.status.success())
+        .unwrap_or(true)
+    {
+        return Tpm2Status::ToolsNotInstalled;
+    }
+
+    // Check if TPM device exists
+    let tpmrm = std::path::Path::new("/dev/tpmrm0");
+    if !tpmrm.exists() {
+        return Tpm2Status::NoDevice;
+    }
+
+    // Check if we can access it
+    match std::fs::metadata(tpmrm) {
+        Ok(_) => {
+            // metadata fields are not inspected — the probe below is the real test
+            // Try to actually use it with a quick SRK creation
+            let tmp = std::env::temp_dir().join(format!("varlock-tpm-check-{}", std::process::id()));
+            let result = Command::new("tpm2_createprimary")
+                .args(["-C", "o", "-g", "sha256", "-G", "ecc256", "-c"])
+                .arg(&tmp)
+                .output();
+
+            let _ = std::fs::remove_file(&tmp);
+
+            match result {
+                Ok(output) if output.status.success() => Tpm2Status::Available,
+                Ok(output) => {
+                    let stderr = String::from_utf8_lossy(&output.stderr);
+                    if stderr.contains("Permission denied") || stderr.contains("TCTI") {
+                        Tpm2Status::PermissionDenied
+                    } else {
+                        Tpm2Status::SrkFailed(stderr.to_string())
+                    }
+                }
+                Err(_) => Tpm2Status::PermissionDenied,
+            }
+        }
+        Err(_) => Tpm2Status::PermissionDenied,
+    }
+}
+
+/// Simple check: is TPM2 available?
+pub fn is_tpm2_available() -> bool { + matches!(check_tpm2_status(), Tpm2Status::Available) +} + +/// Protect a private key by sealing it with the TPM. +/// +/// Returns a blob containing the sealed public + private portions, +/// which can only be unsealed by this machine's TPM. +/// +/// Format: pub_len(4 LE) || pub_data || priv_data +pub fn tpm2_protect(private_key_der: &[u8]) -> Result, String> { + let tmp_dir = std::env::temp_dir().join(format!("varlock-tpm-{}", std::process::id())); + std::fs::create_dir_all(&tmp_dir) + .map_err(|e| format!("Failed to create temp dir: {e}"))?; + + let srk_ctx = tmp_dir.join("srk.ctx"); + let sealed_pub = tmp_dir.join("sealed.pub"); + let sealed_priv = tmp_dir.join("sealed.priv"); + let input_file = tmp_dir.join("input.dat"); + + // Clean up on exit + let _cleanup = CleanupDir(tmp_dir.clone()); + + // Write private key to temp file (restricted permissions) + { + use std::os::unix::fs::OpenOptionsExt; + let mut f = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .mode(0o600) + .open(&input_file) + .map_err(|e| format!("Failed to write temp file: {e}"))?; + f.write_all(private_key_der) + .map_err(|e| format!("Failed to write temp file: {e}"))?; + } + + // Step 1: Create SRK (Storage Root Key) — deterministic + run_tpm2_command( + "tpm2_createprimary", + &["-C", "o", "-g", "sha256", "-G", "ecc256", "-c"], + Some(&srk_ctx), + )?; + + // Step 2: Seal the private key under the SRK + run_tpm2_command_with_args( + "tpm2_create", + &[ + "-C", srk_ctx.to_str().unwrap(), + "-i", input_file.to_str().unwrap(), + "-u", sealed_pub.to_str().unwrap(), + "-r", sealed_priv.to_str().unwrap(), + ], + )?; + + // Step 3: Read the sealed blobs + let pub_data = std::fs::read(&sealed_pub) + .map_err(|e| format!("Failed to read sealed public blob: {e}"))?; + let priv_data = std::fs::read(&sealed_priv) + .map_err(|e| format!("Failed to read sealed private blob: {e}"))?; + + // Pack into a single blob: pub_len(4 LE) || 
pub_data || priv_data + let mut output = Vec::with_capacity(4 + pub_data.len() + priv_data.len()); + output.extend_from_slice(&(pub_data.len() as u32).to_le_bytes()); + output.extend_from_slice(&pub_data); + output.extend_from_slice(&priv_data); + + Ok(output) +} + +/// Unprotect a private key by unsealing it with the TPM. +pub fn tpm2_unprotect(sealed_blob: &[u8]) -> Result, String> { + if sealed_blob.len() < 4 { + return Err("Sealed blob too short".into()); + } + + // Parse: pub_len(4 LE) || pub_data || priv_data + let pub_len = u32::from_le_bytes(sealed_blob[..4].try_into().unwrap()) as usize; + if sealed_blob.len() < 4 + pub_len { + return Err("Sealed blob truncated".into()); + } + let pub_data = &sealed_blob[4..4 + pub_len]; + let priv_data = &sealed_blob[4 + pub_len..]; + + let tmp_dir = std::env::temp_dir().join(format!("varlock-tpm-{}", std::process::id())); + std::fs::create_dir_all(&tmp_dir) + .map_err(|e| format!("Failed to create temp dir: {e}"))?; + + let srk_ctx = tmp_dir.join("srk.ctx"); + let sealed_pub = tmp_dir.join("sealed.pub"); + let sealed_priv = tmp_dir.join("sealed.priv"); + let sealed_ctx = tmp_dir.join("sealed.ctx"); + + let _cleanup = CleanupDir(tmp_dir.clone()); + + // Write sealed blobs to temp files + std::fs::write(&sealed_pub, pub_data) + .map_err(|e| format!("Failed to write sealed pub: {e}"))?; + std::fs::write(&sealed_priv, priv_data) + .map_err(|e| format!("Failed to write sealed priv: {e}"))?; + + // Step 1: Recreate SRK (deterministic — same params = same key) + run_tpm2_command( + "tpm2_createprimary", + &["-C", "o", "-g", "sha256", "-G", "ecc256", "-c"], + Some(&srk_ctx), + )?; + + // Step 2: Load the sealed object + run_tpm2_command_with_args( + "tpm2_load", + &[ + "-C", srk_ctx.to_str().unwrap(), + "-u", sealed_pub.to_str().unwrap(), + "-r", sealed_priv.to_str().unwrap(), + "-c", sealed_ctx.to_str().unwrap(), + ], + )?; + + // Step 3: Unseal + let output = Command::new("tpm2_unseal") + .args(["-c", 
sealed_ctx.to_str().unwrap()]) + .output() + .map_err(|e| format!("Failed to run tpm2_unseal: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("TPM2 unseal failed: {stderr}")); + } + + Ok(output.stdout) +} + +// ── Helpers ────────────────────────────────────────────────────── + +fn run_tpm2_command(cmd: &str, args: &[&str], ctx_path: Option<&std::path::Path>) -> Result<(), String> { + let mut command = Command::new(cmd); + command.args(args); + if let Some(ctx) = ctx_path { + command.arg(ctx); + } + + let output = command + .output() + .map_err(|e| format!("Failed to run {cmd}: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("{cmd} failed: {stderr}")); + } + + Ok(()) +} + +fn run_tpm2_command_with_args(cmd: &str, args: &[&str]) -> Result<(), String> { + let output = Command::new(cmd) + .args(args) + .output() + .map_err(|e| format!("Failed to run {cmd}: {e}"))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(format!("{cmd} failed: {stderr}")); + } + + Ok(()) +} + +/// RAII cleanup for temp directories. +struct CleanupDir(std::path::PathBuf); + +impl Drop for CleanupDir { + fn drop(&mut self) { + let _ = std::fs::remove_dir_all(&self.0); + } +} + +/// Get a user-friendly hint about why TPM2 isn't available. +pub fn get_tpm2_setup_hint() -> Option { + match check_tpm2_status() { + Tpm2Status::Available => None, + Tpm2Status::ToolsNotInstalled => Some( + "TPM2 hardware may be available but tpm2-tools is not installed.\n\ + Install with: sudo apt install tpm2-tools (Debian/Ubuntu)\n\ + or: sudo dnf install tpm2-tools (Fedora)\n\ + or: sudo pacman -S tpm2-tools (Arch)" + .into(), + ), + Tpm2Status::NoDevice => Some( + "No TPM2 device found (/dev/tpmrm0). 
TPM may need to be enabled in BIOS.".into(), + ), + Tpm2Status::PermissionDenied => Some( + "TPM2 device exists but access denied.\n\ + Add your user to the tss group: sudo usermod -aG tss $USER\n\ + Then log out and back in." + .into(), + ), + Tpm2Status::SrkFailed(e) => Some(format!( + "TPM2 device accessible but key creation failed: {e}" + )), + } +} diff --git a/packages/encryption-binary-rust/src/key_store/mod.rs b/packages/encryption-binary-rust/src/key_store/mod.rs new file mode 100644 index 00000000..f1f0eabc --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/mod.rs @@ -0,0 +1,368 @@ +//! Key storage abstraction. +//! +//! Each platform backend stores the P-256 private key in a protected manner: +//! - Windows: DPAPI (CryptProtectData) — encrypted to the current user session +//! - Linux: TPM2 seal/unseal via tpm2-tools — key sealed to hardware TPM chip +//! +//! All backends store the public key as plaintext (it's not secret) and the +//! private key in a platform-specific protected format. The key file format is: +//! +//! ~/.config/varlock/local-encrypt/keys/{keyId}.json +//! { +//! "keyId": "varlock-default", +//! "publicKey": "", +//! "protectedPrivateKey": "", +//! "protection": "dpapi" | "tpm2" | "none", +//! "createdAt": "2024-01-01T00:00:00Z" +//! } +//! +//! The "none" protection level stores the private key as plaintext base64 — +//! equivalent to the JS file-based backend. Used as an absolute fallback. + +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::PathBuf; + +#[cfg(target_os = "linux")] +pub(crate) mod linux; +#[cfg(target_os = "windows")] +mod windows; +#[cfg(target_os = "windows")] +pub(crate) mod windows_hello; + +/// Which protection mechanism is used for the private key. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum Protection { + /// Windows DPAPI — encrypted to current user session + Dpapi, + /// Linux TPM2 — sealed to hardware TPM chip + Tpm2, + /// No protection — plaintext on disk (fallback) + None, +} + +impl std::fmt::Display for Protection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Protection::Dpapi => write!(f, "dpapi"), + Protection::Tpm2 => write!(f, "tpm2"), + Protection::None => write!(f, "none"), + } + } +} + +/// Stored key file format (JSON). +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct StoredKey { + pub key_id: String, + /// Base64 uncompressed P-256 public key (65 bytes raw) + pub public_key: String, + /// Base64 protected private key (protection-dependent format) + pub protected_private_key: String, + /// How the private key is protected + pub protection: Protection, + pub created_at: String, +} + +/// Information about what key protection is available on this platform. 
+pub struct PlatformInfo { + /// Backend name for status output + pub backend: String, + /// Whether keys are hardware-backed (TPM) + pub hardware_backed: bool, + /// Whether biometric unlock is available + pub biometric_available: bool, + /// What protection will be used for new keys + pub protection: Protection, +} + +// ── Path helpers ────────────────────────────────────────────────── + +fn get_config_dir() -> PathBuf { + if let Ok(xdg) = std::env::var("XDG_CONFIG_HOME") { + return PathBuf::from(xdg).join("varlock"); + } + + let home = dirs_home(); + + // Backwards compat: if ~/.varlock exists, use it + let legacy = home.join(".varlock"); + if legacy.exists() { + return legacy; + } + + // Default: ~/.config/varlock (XDG standard) + home.join(".config").join("varlock") +} + +fn dirs_home() -> PathBuf { + #[cfg(target_os = "windows")] + { + std::env::var("USERPROFILE") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("C:\\Users\\Default")) + } + #[cfg(not(target_os = "windows"))] + { + std::env::var("HOME") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("/tmp")) + } +} + +fn get_key_store_dir() -> PathBuf { + get_config_dir().join("local-encrypt").join("keys") +} + +fn get_key_file_path(key_id: &str) -> PathBuf { + get_key_store_dir().join(format!("{key_id}.json")) +} + +// ── Platform-specific key protection ───────────────────────────── + +/// Protect a private key using the best available platform mechanism. +/// Returns (protected_bytes_base64, protection_type). 
+fn protect_private_key(private_key_der: &[u8]) -> (String, Protection) { + #[cfg(target_os = "windows")] + { + match windows::dpapi_protect(private_key_der) { + Ok(protected) => (BASE64.encode(&protected), Protection::Dpapi), + Err(e) => { + eprintln!("Warning: DPAPI protection failed ({e}), falling back to plaintext"); + (BASE64.encode(private_key_der), Protection::None) + } + } + } + + #[cfg(target_os = "linux")] + { + if linux::is_tpm2_available() { + match linux::tpm2_protect(private_key_der) { + Ok(protected) => (BASE64.encode(&protected), Protection::Tpm2), + Err(e) => { + eprintln!("Warning: TPM2 protection failed ({e}), falling back to plaintext"); + (BASE64.encode(private_key_der), Protection::None) + } + } + } else { + // TPM2 not available — plaintext fallback + if let Some(hint) = linux::get_tpm2_setup_hint() { + eprintln!("Note: {hint}"); + } + (BASE64.encode(private_key_der), Protection::None) + } + } + + #[cfg(not(any(target_os = "windows", target_os = "linux")))] + { + // Unsupported platform — plaintext fallback + (BASE64.encode(private_key_der), Protection::None) + } +} + +/// Unprotect a private key, returning the raw PKCS8 DER bytes. +fn unprotect_private_key(protected_base64: &str, protection: &Protection) -> Result, String> { + let protected_bytes = BASE64 + .decode(protected_base64) + .map_err(|e| format!("Invalid base64: {e}"))?; + + match protection { + Protection::None => Ok(protected_bytes), + + #[cfg(target_os = "windows")] + Protection::Dpapi => windows::dpapi_unprotect(&protected_bytes), + + #[cfg(target_os = "linux")] + Protection::Tpm2 => linux::tpm2_unprotect(&protected_bytes), + + #[allow(unreachable_patterns)] + _ => Err(format!("Protection type '{protection}' not supported on this platform")), + } +} + +// ── Public API ─────────────────────────────────────────────────── + +/// Detect the platform capabilities and return status info. 
+pub fn get_platform_info() -> PlatformInfo { + #[cfg(target_os = "windows")] + { + let hello_available = windows_hello::is_hello_available(); + PlatformInfo { + backend: if hello_available { "windows-hello" } else { "windows-dpapi" }.into(), + hardware_backed: false, // DPAPI is software-based; TPM NCrypt is TODO + biometric_available: hello_available, + protection: Protection::Dpapi, + } + } + + #[cfg(target_os = "linux")] + { + let tpm2_available = linux::is_tpm2_available(); + PlatformInfo { + backend: if tpm2_available { "linux-tpm2" } else { "linux-file" }.into(), + hardware_backed: tpm2_available, + biometric_available: false, // fprintd integration is TODO + protection: if tpm2_available { Protection::Tpm2 } else { Protection::None }, + } + } + + #[cfg(not(any(target_os = "windows", target_os = "linux")))] + { + PlatformInfo { + backend: "file".into(), + hardware_backed: false, + biometric_available: false, + protection: Protection::None, + } + } +} + +/// Get a setup hint for TPM2 if it could be available but isn't configured. +#[cfg(target_os = "linux")] +pub fn get_tpm2_setup_hint() -> Option { + linux::get_tpm2_setup_hint() +} + +/// Check if a key exists. +pub fn key_exists(key_id: &str) -> bool { + get_key_file_path(key_id).exists() +} + +/// List all key IDs. +pub fn list_keys() -> Vec { + let dir = get_key_store_dir(); + let entries = match fs::read_dir(&dir) { + Ok(entries) => entries, + Err(_) => return vec![], + }; + + entries + .filter_map(|e| e.ok()) + .filter_map(|e| { + let name = e.file_name().to_string_lossy().to_string(); + name.strip_suffix(".json").map(|s| s.to_string()) + }) + .collect() +} + +/// Generate a new key pair and store it with platform-specific protection. +/// Returns the base64 public key. 
+pub fn generate_key(key_id: &str) -> Result { + let key_pair = crate::crypto::generate_key_pair()?; + + // Decode the private key to protect it + let private_key_der = BASE64 + .decode(&key_pair.private_key) + .map_err(|e| format!("Failed to decode private key: {e}"))?; + + let (protected, protection) = protect_private_key(&private_key_der); + + let stored = StoredKey { + key_id: key_id.to_string(), + public_key: key_pair.public_key.clone(), + protected_private_key: protected, + protection, + created_at: now_iso8601(), + }; + + // Write to disk + let dir = get_key_store_dir(); + fs::create_dir_all(&dir).map_err(|e| format!("Failed to create key store: {e}"))?; + + let path = get_key_file_path(key_id); + let json = serde_json::to_string_pretty(&stored) + .map_err(|e| format!("Failed to serialize key: {e}"))?; + + // Write with restricted permissions + #[cfg(unix)] + { + use std::os::unix::fs::OpenOptionsExt; + let mut opts = fs::OpenOptions::new(); + opts.write(true).create(true).truncate(true).mode(0o600); + use std::io::Write; + let mut file = opts.open(&path).map_err(|e| format!("Failed to write key file: {e}"))?; + file.write_all(json.as_bytes()) + .map_err(|e| format!("Failed to write key file: {e}"))?; + } + #[cfg(not(unix))] + { + fs::write(&path, &json).map_err(|e| format!("Failed to write key file: {e}"))?; + } + + Ok(key_pair.public_key) +} + +/// Delete a key. +pub fn delete_key(key_id: &str) -> bool { + let path = get_key_file_path(key_id); + fs::remove_file(path).is_ok() +} + +/// Load a stored key and return (private_key_der, public_key_base64). 
+pub fn load_key(key_id: &str) -> Result<(Vec, String), String> { + let path = get_key_file_path(key_id); + let data = fs::read_to_string(&path).map_err(|_| format!("Key not found: {key_id}"))?; + let stored: StoredKey = + serde_json::from_str(&data).map_err(|e| format!("Corrupted key file: {e}"))?; + + let private_key_der = unprotect_private_key(&stored.protected_private_key, &stored.protection)?; + Ok((private_key_der, stored.public_key)) +} + +/// Load just the public key (no protection needed). +pub fn load_public_key(key_id: &str) -> Result { + let path = get_key_file_path(key_id); + let data = fs::read_to_string(&path).map_err(|_| format!("Key not found: {key_id}"))?; + let stored: StoredKey = + serde_json::from_str(&data).map_err(|e| format!("Corrupted key file: {e}"))?; + Ok(stored.public_key) +} + +fn now_iso8601() -> String { + // Simple ISO 8601 without external crate + let duration = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default(); + let secs = duration.as_secs(); + // Approximate UTC — good enough for metadata + let days = secs / 86400; + let time_of_day = secs % 86400; + let hours = time_of_day / 3600; + let minutes = (time_of_day % 3600) / 60; + let seconds = time_of_day % 60; + + // Calculate year/month/day from days since epoch (simplified) + let mut y = 1970i64; + let mut remaining_days = days as i64; + loop { + let days_in_year = if is_leap_year(y) { 366 } else { 365 }; + if remaining_days < days_in_year { + break; + } + remaining_days -= days_in_year; + y += 1; + } + let mut m = 1u32; + let days_in_months = if is_leap_year(y) { + [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + } else { + [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + }; + for dim in days_in_months { + if remaining_days < dim { + break; + } + remaining_days -= dim; + m += 1; + } + let d = remaining_days + 1; + + format!("{y:04}-{m:02}-{d:02}T{hours:02}:{minutes:02}:{seconds:02}Z") +} + +fn is_leap_year(y: i64) -> bool { + (y % 4 
== 0 && y % 100 != 0) || y % 400 == 0 +} diff --git a/packages/encryption-binary-rust/src/key_store/windows.rs b/packages/encryption-binary-rust/src/key_store/windows.rs new file mode 100644 index 00000000..886ee45c --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/windows.rs @@ -0,0 +1,124 @@ +//! Windows key protection using DPAPI (CryptProtectData / CryptUnprotectData). +//! +//! DPAPI encrypts data to the current Windows user account. The encrypted blob +//! can only be decrypted by the same user on the same machine. No additional +//! credentials are needed at decrypt time — the user's login session provides +//! the decryption key. +//! +//! This is the same mechanism used by Chrome, Edge, and other apps to protect +//! stored passwords and cookies. +//! +//! Security properties: +//! - Key is never stored as plaintext on disk +//! - Encrypted to the current user's master key (derived from password) +//! - Cannot be decrypted by other users or on other machines +//! - Survives reboots (unlike Linux keyring) +//! - Does NOT require TPM (software-only, but user-scoped) + +use windows::Win32::Security::Cryptography::{ + CryptProtectData, CryptUnprotectData, CRYPT_INTEGER_BLOB, + CRYPTPROTECT_UI_FORBIDDEN, +}; +use windows::Win32::System::Memory::LocalFree; +use std::ptr; + +const DPAPI_DESCRIPTION: &str = "Varlock Local Encryption Key"; + +/// Encrypt data using DPAPI (CryptProtectData). +/// Returns the encrypted blob. 
+pub fn dpapi_protect(plaintext: &[u8]) -> Result, String> { + let mut data_in = CRYPT_INTEGER_BLOB { + cbData: plaintext.len() as u32, + pbData: plaintext.as_ptr() as *mut u8, + }; + + // Optional entropy — we use the description as additional context + let entropy_bytes: Vec = DPAPI_DESCRIPTION.encode_utf16().chain(std::iter::once(0)).collect(); + let entropy_u8: Vec = entropy_bytes.iter().flat_map(|w| w.to_le_bytes()).collect(); + let mut entropy = CRYPT_INTEGER_BLOB { + cbData: entropy_u8.len() as u32, + pbData: entropy_u8.as_ptr() as *mut u8, + }; + + let mut data_out = CRYPT_INTEGER_BLOB { + cbData: 0, + pbData: ptr::null_mut(), + }; + + let description: Vec = DPAPI_DESCRIPTION.encode_utf16().chain(std::iter::once(0)).collect(); + + let result = unsafe { + CryptProtectData( + &data_in, + windows::core::PCWSTR(description.as_ptr()), + Some(&entropy), + None, // reserved + None, // no prompt + CRYPTPROTECT_UI_FORBIDDEN, // no UI + &mut data_out, + ) + }; + + if result.is_err() { + return Err(format!("CryptProtectData failed: {:?}", result.err())); + } + + // Copy result and free the DPAPI-allocated buffer + let encrypted = unsafe { + let slice = std::slice::from_raw_parts(data_out.pbData, data_out.cbData as usize); + let vec = slice.to_vec(); + let _ = LocalFree(data_out.pbData as isize); + vec + }; + + Ok(encrypted) +} + +/// Decrypt data using DPAPI (CryptUnprotectData). +/// Returns the decrypted plaintext bytes. 
+pub fn dpapi_unprotect(encrypted: &[u8]) -> Result, String> { + let mut data_in = CRYPT_INTEGER_BLOB { + cbData: encrypted.len() as u32, + pbData: encrypted.as_ptr() as *mut u8, + }; + + let entropy_bytes: Vec = DPAPI_DESCRIPTION.encode_utf16().chain(std::iter::once(0)).collect(); + let entropy_u8: Vec = entropy_bytes.iter().flat_map(|w| w.to_le_bytes()).collect(); + let mut entropy = CRYPT_INTEGER_BLOB { + cbData: entropy_u8.len() as u32, + pbData: entropy_u8.as_ptr() as *mut u8, + }; + + let mut data_out = CRYPT_INTEGER_BLOB { + cbData: 0, + pbData: ptr::null_mut(), + }; + + let result = unsafe { + CryptUnprotectData( + &data_in, + None, // description out (don't need it) + Some(&entropy), + None, // reserved + None, // no prompt + CRYPTPROTECT_UI_FORBIDDEN, // no UI + &mut data_out, + ) + }; + + if result.is_err() { + return Err(format!( + "CryptUnprotectData failed — key may have been encrypted by a different user: {:?}", + result.err() + )); + } + + let decrypted = unsafe { + let slice = std::slice::from_raw_parts(data_out.pbData, data_out.cbData as usize); + let vec = slice.to_vec(); + let _ = LocalFree(data_out.pbData as isize); + vec + }; + + Ok(decrypted) +} diff --git a/packages/encryption-binary-rust/src/key_store/windows_hello.rs b/packages/encryption-binary-rust/src/key_store/windows_hello.rs new file mode 100644 index 00000000..2711df6a --- /dev/null +++ b/packages/encryption-binary-rust/src/key_store/windows_hello.rs @@ -0,0 +1,102 @@ +//! Windows Hello biometric verification. +//! +//! Uses the WinRT `UserConsentVerifier` API to show the Windows Hello dialog +//! (face recognition, fingerprint, or PIN). This is the same dialog that +//! Windows uses for login and app authentication. +//! +//! The verification is decoupled from key storage (DPAPI handles that). +//! This module purely handles user presence verification. 
+
+use windows::Security::Credentials::UI::{
+    UserConsentVerificationResult, UserConsentVerifier, UserConsentVerifierAvailability,
+};
+
+/// Check if Windows Hello is available and configured.
+///
+/// Thin wrapper over `get_hello_status` so the availability logic lives in
+/// one place (mirrors `is_tpm2_available` in the Linux backend).
+pub fn is_hello_available() -> bool {
+    matches!(get_hello_status(), HelloStatus::Available)
+}
+
+/// Detailed availability check for status reporting.
+pub fn get_hello_status() -> HelloStatus {
+    match UserConsentVerifier::CheckAvailabilityAsync() {
+        Ok(op) => match op.get() {
+            Ok(availability) => match availability {
+                UserConsentVerifierAvailability::Available => HelloStatus::Available,
+                UserConsentVerifierAvailability::DeviceNotPresent => HelloStatus::NoDevice,
+                UserConsentVerifierAvailability::NotConfiguredForUser => HelloStatus::NotConfigured,
+                UserConsentVerifierAvailability::DisabledByPolicy => HelloStatus::DisabledByPolicy,
+                _ => HelloStatus::Unknown,
+            },
+            Err(e) => HelloStatus::Error(format!("{e}")),
+        },
+        Err(e) => HelloStatus::Error(format!("{e}")),
+    }
+}
+
+/// Outcome of a Windows Hello availability probe.
+pub enum HelloStatus {
+    Available,
+    NoDevice,
+    NotConfigured,
+    DisabledByPolicy,
+    Unknown,
+    Error(String),
+}
+
+/// Request user verification via Windows Hello.
+///
+/// Shows the Windows Hello dialog with the given message.
+/// Returns Ok(true) if verified, Ok(false) if cancelled, Err on failure.
+pub fn verify_user(message: &str) -> Result { + let message = windows::core::HSTRING::from(message); + + let op = UserConsentVerifier::RequestVerificationAsync(&message) + .map_err(|e| format!("Failed to request verification: {e}"))?; + + let result = op.get().map_err(|e| format!("Verification failed: {e}"))?; + + match result { + UserConsentVerificationResult::Verified => Ok(true), + UserConsentVerificationResult::Canceled => Ok(false), + UserConsentVerificationResult::DeviceNotPresent => { + Err("Windows Hello device not present".into()) + } + UserConsentVerificationResult::NotConfiguredForUser => { + Err("Windows Hello not configured".into()) + } + UserConsentVerificationResult::DisabledByPolicy => { + Err("Windows Hello disabled by policy".into()) + } + UserConsentVerificationResult::DeviceBusy => Err("Windows Hello device busy".into()), + UserConsentVerificationResult::RetriesExhausted => { + Err("Windows Hello retries exhausted".into()) + } + _ => Err("Unknown verification result".into()), + } +} + +/// Get a setup hint if Windows Hello could be available. +pub fn get_setup_hint() -> Option { + match get_hello_status() { + HelloStatus::Available => None, + HelloStatus::NoDevice => Some( + "No Windows Hello compatible device found.\n\ + Windows Hello requires a fingerprint reader, IR camera, or compatible security key." + .into(), + ), + HelloStatus::NotConfigured => Some( + "Windows Hello is available but not set up.\n\ + Configure it in Settings > Accounts > Sign-in options." + .into(), + ), + HelloStatus::DisabledByPolicy => Some( + "Windows Hello is disabled by group policy.".into(), + ), + _ => None, + } +} diff --git a/packages/encryption-binary-rust/src/main.rs b/packages/encryption-binary-rust/src/main.rs new file mode 100644 index 00000000..3fe7b885 --- /dev/null +++ b/packages/encryption-binary-rust/src/main.rs @@ -0,0 +1,246 @@ +//! varlock-local-encrypt — Cross-platform local encryption binary for Varlock. +//! +//! 
Provides the same CLI interface as the Swift macOS binary: +//! generate-key, delete-key, list-keys, key-exists, encrypt, decrypt, status, daemon +//! +//! All output is JSON. Errors return {"error": "message"}. + +mod crypto; +mod daemon; +mod ipc; +mod key_store; + +use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; +use serde_json::json; + +const DEFAULT_KEY_ID: &str = "varlock-default"; + +fn main() { + let args: Vec = std::env::args().collect(); + let command = args.get(1).map(|s| s.as_str()).unwrap_or("help"); + + match command { + "generate-key" => cmd_generate_key(&args), + "delete-key" => cmd_delete_key(&args), + "list-keys" => cmd_list_keys(), + "key-exists" => cmd_key_exists(&args), + "encrypt" => cmd_encrypt(&args), + "decrypt" => cmd_decrypt(&args), + "status" => cmd_status(), + "daemon" => cmd_daemon(&args), + "help" | "--help" | "-h" => cmd_help(), + _ => json_error(&format!("Unknown command: {command}. Run with --help for usage.")), + } +} + +// ── CLI arg helpers ────────────────────────────────────────────── + +fn get_arg(args: &[String], flag: &str) -> Option { + args.iter() + .position(|a| a == flag) + .and_then(|i| args.get(i + 1).cloned()) +} + +fn get_key_id(args: &[String]) -> String { + get_arg(args, "--key-id").unwrap_or_else(|| DEFAULT_KEY_ID.to_string()) +} + +// ── JSON output helpers ───────────────────────────────────────── + +fn json_output(value: &serde_json::Value) { + println!("{}", serde_json::to_string(value).unwrap_or_default()); +} + +fn json_error(message: &str) -> ! { + json_output(&json!({"error": message})); + std::process::exit(1); +} + +fn json_success(result: serde_json::Value) -> ! 
{ + let mut obj = json!({"ok": true}); + if let (Some(base), Some(extra)) = (obj.as_object_mut(), result.as_object()) { + for (k, v) in extra { + base.insert(k.clone(), v.clone()); + } + } + json_output(&obj); + std::process::exit(0); +} + +// ── Commands ──────────────────────────────────────────────────── + +fn cmd_generate_key(args: &[String]) { + let key_id = get_key_id(args); + + match key_store::generate_key(&key_id) { + Ok(public_key) => { + let pub_bytes = BASE64.decode(&public_key).unwrap_or_default(); + json_success(json!({ + "keyId": key_id, + "publicKey": public_key, + "publicKeyBytes": pub_bytes.len(), + })); + } + Err(e) => json_error(&e), + } +} + +fn cmd_delete_key(args: &[String]) { + let key_id = get_key_id(args); + let deleted = key_store::delete_key(&key_id); + json_success(json!({ + "keyId": key_id, + "deleted": deleted, + })); +} + +fn cmd_list_keys() { + let keys = key_store::list_keys(); + json_success(json!({"keys": keys})); +} + +fn cmd_key_exists(args: &[String]) { + let key_id = get_key_id(args); + let exists = key_store::key_exists(&key_id); + json_success(json!({ + "keyId": key_id, + "exists": exists, + })); +} + +fn cmd_encrypt(args: &[String]) { + let key_id = get_key_id(args); + + let data_b64 = match get_arg(args, "--data") { + Some(d) => d, + None => json_error("Missing --data argument (base64-encoded plaintext)"), + }; + + let plaintext = match BASE64.decode(&data_b64) { + Ok(d) => d, + Err(_) => json_error("Invalid base64 data"), + }; + + // Load just the public key (no private key access needed) + let public_key = match key_store::load_public_key(&key_id) { + Ok(pk) => pk, + Err(e) => json_error(&e), + }; + + match crypto::encrypt(&public_key, &plaintext) { + Ok(ciphertext) => json_success(json!({"ciphertext": ciphertext})), + Err(e) => json_error(&e), + } +} + +fn cmd_decrypt(args: &[String]) { + let key_id = get_key_id(args); + + let data_b64 = match get_arg(args, "--data") { + Some(d) => d, + None => json_error("Missing 
--data argument (base64-encoded ciphertext)"), + }; + + // Load the full key pair (private key needed) + let (private_key_der, public_key_b64) = match key_store::load_key(&key_id) { + Ok(k) => k, + Err(e) => json_error(&e), + }; + + let private_key_b64 = BASE64.encode(&private_key_der); + + match crypto::decrypt(&private_key_b64, &public_key_b64, &data_b64) { + Ok(plaintext_bytes) => { + let plaintext = match String::from_utf8(plaintext_bytes) { + Ok(s) => s, + Err(_) => json_error("Decrypted data is not valid UTF-8"), + }; + json_success(json!({"plaintext": plaintext})); + } + Err(e) => json_error(&e), + } +} + +fn cmd_status() { + let info = key_store::get_platform_info(); + let keys = key_store::list_keys(); + + #[allow(unused_mut)] + let mut result = json!({ + "backend": info.backend, + "hardwareBacked": info.hardware_backed, + "biometricAvailable": info.biometric_available, + "protection": info.protection.to_string(), + "platform": std::env::consts::OS, + "arch": std::env::consts::ARCH, + "keys": keys, + }); + + // Include setup hints for optional features + #[cfg(target_os = "linux")] + { + if !info.hardware_backed { + if let Some(hint) = key_store::get_tpm2_setup_hint() { + result.as_object_mut().unwrap().insert( + "setupHint".to_string(), + serde_json::Value::String(hint), + ); + } + } + } + #[cfg(target_os = "windows")] + { + if !info.biometric_available { + if let Some(hint) = key_store::windows_hello::get_setup_hint() { + result.as_object_mut().unwrap().insert( + "setupHint".to_string(), + serde_json::Value::String(hint), + ); + } + } + } + + json_success(result); +} + +fn cmd_daemon(args: &[String]) { + let socket_path = match get_arg(args, "--socket-path") { + Some(sp) => sp, + None => json_error("Missing --socket-path argument"), + }; + + let pid_path = get_arg(args, "--pid-path"); + + if let Err(e) = daemon::run_daemon(&socket_path, pid_path.as_deref()) { + json_error(&format!("Failed to start daemon: {e}")); + } +} + +fn cmd_help() { + let help = 
r#"varlock-local-encrypt - Cross-platform local encryption for Varlock + +COMMANDS: + generate-key [--key-id ] Create a new encryption key + delete-key [--key-id ] Delete an encryption key + list-keys List all Varlock encryption keys + key-exists [--key-id ] Check if a key exists + encrypt --data [--key-id ] Encrypt data (one-shot) + decrypt --data [--key-id ] Decrypt data (one-shot) + status Check platform capabilities + daemon --socket-path [--pid-path ] Start IPC daemon + +OPTIONS: + --key-id Key identifier (default: varlock-default) + --data Base64-encoded data + --socket-path Unix socket path for daemon mode + --pid-path PID file path for daemon mode + +PLATFORM PROTECTION: + Windows: DPAPI (user-session-scoped encryption) + Linux: Kernel keyring (key held in kernel memory) + +All output is JSON. Errors return {"error": "message"}. +"#; + print!("{help}"); + std::process::exit(0); +} diff --git a/packages/encryption-binary-swift/.env.schema b/packages/encryption-binary-swift/.env.schema new file mode 100644 index 00000000..ab7c4933 --- /dev/null +++ b/packages/encryption-binary-swift/.env.schema @@ -0,0 +1,22 @@ +# @defaultSensitive=false @defaultRequired=infer +# @plugin(@varlock/1password-plugin) +# @initOp(allowAppAuth=true, token=$OP_CI_TOKEN) +# --- + +# this must be set in github actions secrets +# @type=opServiceAccountToken @sensitive +OP_CI_TOKEN= + +# Apple code signing - used in CI to sign the macOS native binary +# @sensitive +APPLE_CERTIFICATE_BASE64=op("op://VarlockCI/apple developer/APPLE_CERTIFICATE_BASE64") +# @sensitive +APPLE_CERTIFICATE_PASSWORD=op("op://VarlockCI/apple developer/APPLE_CERTIFICATE_PASSWORD") +APPLE_SIGNING_IDENTITY=op("op://VarlockCI/apple developer/APPLE_SIGNING_IDENTITY") +APPLE_TEAM_ID=op("op://VarlockCI/apple developer/APPLE_TEAM_ID") + +# Apple notarization +# @sensitive +APPLE_ID=op("op://VarlockCI/apple developer/APPLE_NOTARIZATION_APPLE_ID") +# @sensitive +APPLE_APP_PASSWORD=op("op://VarlockCI/apple 
developer/APPLE_NOTARIZATION_APP_PASSWORD") diff --git a/packages/encryption-binary-swift/.gitignore b/packages/encryption-binary-swift/.gitignore new file mode 100644 index 00000000..74487071 --- /dev/null +++ b/packages/encryption-binary-swift/.gitignore @@ -0,0 +1 @@ +swift/.build diff --git a/packages/encryption-binary-swift/README.md b/packages/encryption-binary-swift/README.md new file mode 100644 index 00000000..b2a2163b --- /dev/null +++ b/packages/encryption-binary-swift/README.md @@ -0,0 +1,32 @@ +# @varlock/encryption-binary-swift + +macOS native binary for varlock's local encryption, built in Swift. + +## Why Swift? + +Varlock uses the **Secure Enclave** for hardware-backed key storage on macOS. The Secure Enclave, Touch ID biometric prompts, and native UI (status bar menu, secure input dialogs) are only accessible through Apple's `Security`, `LocalAuthentication`, and `AppKit` frameworks — which are designed for Swift/Objective-C. Rust or other languages would require fragile FFI bindings with no stable C ABI to target. + +The `.app` bundle format is also required for custom Touch ID icons, `LSUIElement` (menu-bar-only) behavior, and proper code signing + notarization. + +Rust is used for Windows (DPAPI / Windows Hello) and Linux (kernel keyring, with optional TPM2), where the platform APIs have C-friendly interfaces. The IPC protocol (length-prefixed JSON over a Unix socket) is the same across all platforms. + +## Structure + +- `swift/` — Swift Package Manager project (`VarlockEnclave` executable) +- `scripts/build-swift.ts` — Two-phase build: compile (cacheable) + bundle (mode-specific `.app` wrapping + codesign) +- `resources/` — App icon and other bundle resources + +## Building + +```bash +# Local dev (current arch, dev mode) +bun run build:swift:dev + +# Universal binary (arm64 + x86_64, for CI) +bun run build:swift + +# With signing and release metadata +bun run build:swift -- --mode release --version 1.2.3 --sign "Developer ID Application: ..." 
+``` + +Output: `packages/varlock/native-bins/darwin/VarlockEnclave.app` diff --git a/packages/encryption-binary-swift/package.json b/packages/encryption-binary-swift/package.json new file mode 100644 index 00000000..f6bd7f1f --- /dev/null +++ b/packages/encryption-binary-swift/package.json @@ -0,0 +1,18 @@ +{ + "name": "@varlock/encryption-binary-swift", + "description": "macOS Secure Enclave encryption binary for varlock (Swift)", + "version": "0.0.1", + "private": true, + "scripts": { + "kill-daemon": "bun run scripts/kill-daemon.ts", + "build:swift": "bun run kill-daemon && bun run scripts/build-swift.ts --universal", + "build:swift:dev": "bun run kill-daemon && bun run scripts/build-swift.ts", + "clean": "rm -rf swift/.build" + }, + "devDependencies": { + "@varlock/1password-plugin": "workspace:*", + "varlock": "workspace:*" + }, + "author": "dmno-dev", + "license": "MIT" +} diff --git a/packages/encryption-binary-swift/resources/AppIcon.icns b/packages/encryption-binary-swift/resources/AppIcon.icns new file mode 100644 index 00000000..b1fc5d23 Binary files /dev/null and b/packages/encryption-binary-swift/resources/AppIcon.icns differ diff --git a/packages/encryption-binary-swift/scripts/build-swift.ts b/packages/encryption-binary-swift/scripts/build-swift.ts new file mode 100644 index 00000000..0d419cd6 --- /dev/null +++ b/packages/encryption-binary-swift/scripts/build-swift.ts @@ -0,0 +1,181 @@ +#!/usr/bin/env bun + +/** + * Build script for the VarlockEnclave Swift binary. + * + * Two-phase build: + * 1. Compile — produces a universal (or single-arch) binary. This is the slow + * step and is cached in CI by source hash. + * 2. Bundle — wraps the binary in a .app bundle with environment-specific + * metadata (name, version, bundle ID) and codesigns it. This is fast and + * can vary per build mode without recompiling. 
+ * + * Usage: + * bun run scripts/build-swift.ts # dev build (current arch) + * bun run scripts/build-swift.ts --universal # universal binary (CI) + * bun run scripts/build-swift.ts --mode release # production bundle metadata + * bun run scripts/build-swift.ts --sign "Developer ID Application: ..." + * bun run scripts/build-swift.ts --version 1.2.3 # set bundle version + */ + +import { execSync } from 'node:child_process'; +import path from 'node:path'; +import fs from 'node:fs'; + +// ── CLI args ──────────────────────────────────────────────────── + +const args = process.argv.slice(2); + +function getArg(flag: string): string | undefined { + const idx = args.indexOf(flag); + return idx >= 0 ? args[idx + 1] : undefined; +} + +const universal = args.includes('--universal'); +const signingIdentity = getArg('--sign'); +const mode = (getArg('--mode') ?? 'dev') as 'dev' | 'preview' | 'release'; +const version = getArg('--version') ?? '0.0.0-dev'; + +// ── Paths ─────────────────────────────────────────────────────── + +const swiftDir = path.resolve(import.meta.dir, '..', 'swift'); +const binDir = path.resolve(import.meta.dir, '..', '..', 'varlock', 'native-bins', 'darwin'); +const binaryName = 'varlock-local-encrypt'; +const appBundleName = 'VarlockEnclave.app'; + +// ── Build mode config ─────────────────────────────────────────── + +interface BundleConfig { + bundleId: string; + displayName: string; + menuTitle: string; +} + +const BUNDLE_CONFIGS: Record = { + dev: { + bundleId: 'dev.varlock.enclave.dev', + displayName: 'Varlock (Dev)', + menuTitle: 'Varlock Enclave (Dev)', + }, + preview: { + bundleId: 'dev.varlock.enclave.preview', + displayName: 'Varlock (Preview)', + menuTitle: 'Varlock Enclave (Preview)', + }, + release: { + bundleId: 'dev.varlock.enclave', + displayName: 'Varlock', + menuTitle: 'Varlock Secure Enclave', + }, +}; + +const bundleConfig = BUNDLE_CONFIGS[mode]; +console.log(`Build mode: ${mode}`); +console.log(`Bundle ID: 
${bundleConfig.bundleId}`); +console.log(`Display name: ${bundleConfig.displayName}`); + +function run(cmd: string, opts?: { cwd?: string }) { + console.log(`> ${cmd}`); + execSync(cmd, { stdio: 'inherit', cwd: opts?.cwd ?? swiftDir }); +} + +// ── Phase 1: Compile ──────────────────────────────────────────── + +fs.mkdirSync(binDir, { recursive: true }); + +let builtBinaryPath: string; + +if (universal) { + // Build universal binary (arm64 + x86_64) — used in CI + run('swift build -c release --arch arm64'); + run('swift build -c release --arch x86_64'); + + const arm64Binary = path.join(swiftDir, '.build', 'arm64-apple-macosx', 'release', 'VarlockEnclave'); + const x86Binary = path.join(swiftDir, '.build', 'x86_64-apple-macosx', 'release', 'VarlockEnclave'); + + builtBinaryPath = path.join(binDir, `${binaryName}-universal`); + run(`lipo -create "${arm64Binary}" "${x86Binary}" -output "${builtBinaryPath}"`); + run(`lipo -info "${builtBinaryPath}"`); +} else { + // Current platform only — fast for local dev + run('swift build -c release'); + builtBinaryPath = path.join(swiftDir, '.build', 'release', 'VarlockEnclave'); +} + +// ── Phase 2: Bundle ───────────────────────────────────────────── + +const appDir = path.join(binDir, appBundleName); +const contentsDir = path.join(appDir, 'Contents'); +const macosDir = path.join(contentsDir, 'MacOS'); +const resourcesDir = path.join(contentsDir, 'Resources'); + +// Clean previous bundle +fs.rmSync(appDir, { recursive: true, force: true }); +fs.mkdirSync(macosDir, { recursive: true }); +fs.mkdirSync(resourcesDir, { recursive: true }); + +// Copy binary into bundle +const bundleBinaryPath = path.join(macosDir, binaryName); +fs.copyFileSync(builtBinaryPath, bundleBinaryPath); +fs.chmodSync(bundleBinaryPath, 0o755); + +// Clean up temp universal binary if we created one +if (universal) { + fs.unlinkSync(builtBinaryPath); +} + +// Copy icon if it exists +const iconSrc = path.join(import.meta.dir, '..', 'resources', 
'AppIcon.icns'); +const hasIcon = fs.existsSync(iconSrc); +if (hasIcon) { + fs.copyFileSync(iconSrc, path.join(resourcesDir, 'AppIcon.icns')); +} + +// Write Info.plist with environment-specific metadata +const infoPlist = ` + + + + CFBundleIdentifier + ${bundleConfig.bundleId} + CFBundleName + ${bundleConfig.displayName} + CFBundleDisplayName + ${bundleConfig.displayName} + CFBundleExecutable + ${binaryName} + CFBundlePackageType + APPL + CFBundleVersion + ${version} + CFBundleShortVersionString + ${version} + LSUIElement + + LSBackgroundOnly + ${hasIcon ? ` + CFBundleIconFile + AppIcon` : ''} + VarlockBuildMode + ${mode} + VarlockMenuTitle + ${bundleConfig.menuTitle} + +`; + +fs.writeFileSync(path.join(contentsDir, 'Info.plist'), infoPlist); + +console.log(`Built app bundle: ${appDir}`); + +// ── Codesign ──────────────────────────────────────────────────── + +if (signingIdentity) { + run(`codesign --force --deep --options runtime --sign "${signingIdentity}" "${appDir}"`); + run(`codesign --verify --verbose "${appDir}"`); + console.log('App bundle signed successfully'); +} else { + run(`codesign --force --deep --sign - "${appDir}"`); + console.log('App bundle ad-hoc signed (use --sign for proper signing)'); +} + +console.log('Done!'); diff --git a/packages/encryption-binary-swift/scripts/kill-daemon.ts b/packages/encryption-binary-swift/scripts/kill-daemon.ts new file mode 100644 index 00000000..919da7cf --- /dev/null +++ b/packages/encryption-binary-swift/scripts/kill-daemon.ts @@ -0,0 +1,47 @@ +#!/usr/bin/env bun + +/** + * Kill the running VarlockEnclave daemon (if any). + * + * Reads the PID from the local-encrypt daemon.pid, + * sends SIGTERM, and cleans up PID and socket files. 
+ */ + +import path from 'node:path'; +import fs from 'node:fs'; +import { getUserVarlockDir } from '../../../packages/varlock/src/lib/user-config-dir'; + +const socketDir = path.join(getUserVarlockDir(), 'local-encrypt'); +const pidPath = path.join(socketDir, 'daemon.pid'); +const socketPath = path.join(socketDir, 'daemon.sock'); + +if (!fs.existsSync(pidPath)) { + console.log('No daemon PID file found, nothing to kill'); + process.exit(0); +} + +const pid = parseInt(fs.readFileSync(pidPath, 'utf-8').trim(), 10); +if (Number.isNaN(pid)) { + console.log('Invalid PID file, cleaning up'); + fs.unlinkSync(pidPath); + process.exit(0); +} + +try { + process.kill(pid, 'SIGTERM'); + console.log(`Killed daemon (PID ${pid})`); +} catch (err: any) { + if (err.code === 'ESRCH') { + console.log(`Daemon (PID ${pid}) was not running, cleaning up stale PID file`); + } else { + throw err; + } +} + +// Clean up PID and socket files +try { + fs.unlinkSync(pidPath); +} catch { /* ignore */ } +try { + fs.unlinkSync(socketPath); +} catch { /* ignore */ } diff --git a/packages/encryption-binary-swift/swift/Package.swift b/packages/encryption-binary-swift/swift/Package.swift new file mode 100644 index 00000000..0ada0708 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Package.swift @@ -0,0 +1,20 @@ +// swift-tools-version: 5.9 +import PackageDescription + +let package = Package( + name: "VarlockEnclave", + platforms: [ + .macOS(.v13), + ], + targets: [ + .executableTarget( + name: "VarlockEnclave", + path: "Sources/VarlockEnclave", + linkerSettings: [ + .linkedFramework("Security"), + .linkedFramework("LocalAuthentication"), + .linkedFramework("AppKit"), + ] + ), + ] +) diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/IPCServer.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/IPCServer.swift new file mode 100644 index 00000000..2d754675 --- /dev/null +++ 
b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/IPCServer.swift @@ -0,0 +1,217 @@ +import Foundation + +/// Unix domain socket IPC server using length-prefixed JSON protocol. +/// +/// Protocol: +/// - 4-byte little-endian message length +/// - JSON payload: { "id": "...", "action": "...", "payload": { ... } } +/// - Response: { "id": "...", "result": "..." } or { "id": "...", "error": "..." } +final class IPCServer { + private let socketPath: String + private var socketFD: Int32 = -1 + private var clientHandlers: [Int32: DispatchWorkItem] = [:] + private let queue = DispatchQueue(label: "dev.varlock.ipc", attributes: .concurrent) + private let handlersQueue = DispatchQueue(label: "dev.varlock.ipc.handlers") + private var isRunning = false + + /// Handler for incoming messages. Second parameter is the peer's TTY identity (nil if unknown). + var messageHandler: ((_ message: [String: Any], _ ttyId: String?) -> [String: Any])? + + /// Called after accept (new client) and after each successfully parsed JSON message. + var onConnectionActivity: (() -> Void)? 
+ + init(socketPath: String) { + self.socketPath = socketPath + } + + // MARK: - Server Lifecycle + + func start() throws { + // Clean up any stale socket file + unlink(socketPath) + + // Ensure parent directory exists + let dir = (socketPath as NSString).deletingLastPathComponent + try FileManager.default.createDirectory(atPath: dir, withIntermediateDirectories: true) + + // Create socket + socketFD = socket(AF_UNIX, SOCK_STREAM, 0) + guard socketFD >= 0 else { + throw IPCError.socketCreationFailed(String(cString: strerror(errno))) + } + + // Bind + var addr = sockaddr_un() + addr.sun_family = sa_family_t(AF_UNIX) + withUnsafeMutablePointer(to: &addr.sun_path) { ptr in + socketPath.withCString { cstr in + _ = strcpy(UnsafeMutableRawPointer(ptr).assumingMemoryBound(to: CChar.self), cstr) + } + } + + let bindResult = withUnsafePointer(to: &addr) { ptr in + ptr.withMemoryRebound(to: sockaddr.self, capacity: 1) { sockaddrPtr in + bind(socketFD, sockaddrPtr, socklen_t(MemoryLayout.size)) + } + } + guard bindResult == 0 else { + close(socketFD) + throw IPCError.bindFailed(String(cString: strerror(errno))) + } + + // Set socket permissions (owner only) + chmod(socketPath, 0o600) + + // Listen + guard listen(socketFD, 5) == 0 else { + close(socketFD) + unlink(socketPath) + throw IPCError.listenFailed(String(cString: strerror(errno))) + } + + isRunning = true + + // Accept loop on background queue + queue.async { [weak self] in + self?.acceptLoop() + } + } + + func stop() { + isRunning = false + if socketFD >= 0 { + close(socketFD) + socketFD = -1 + } + unlink(socketPath) + + // Cancel all client handlers + handlersQueue.sync { + for (fd, work) in clientHandlers { + work.cancel() + close(fd) + } + clientHandlers.removeAll() + } + } + + // MARK: - Accept Loop + + private func acceptLoop() { + while isRunning { + var clientAddr = sockaddr_un() + var clientAddrLen = socklen_t(MemoryLayout.size) + + let clientFD = withUnsafeMutablePointer(to: &clientAddr) { ptr in + 
ptr.withMemoryRebound(to: sockaddr.self, capacity: 1) { sockaddrPtr in + accept(socketFD, sockaddrPtr, &clientAddrLen) + } + } + + guard clientFD >= 0 else { + if !isRunning { break } + continue + } + + onConnectionActivity?() + + let workItem = DispatchWorkItem { [weak self] in + self?.handleClient(fd: clientFD) + } + handlersQueue.sync { + clientHandlers[clientFD] = workItem + } + queue.async(execute: workItem) + } + } + + // MARK: - Client Handling + + private func handleClient(fd: Int32) { + defer { + close(fd) + handlersQueue.sync { + _ = clientHandlers.removeValue(forKey: fd) + } + } + + // Resolve the peer's TTY identity once per connection + let ttyId: String? + if let peerPid = getPeerPid(fd: fd) { + ttyId = getTtyIdentifier(forPid: peerPid) + } else { + ttyId = nil + } + + while isRunning { + // Read 4-byte length prefix (little-endian) + var lengthBytes = [UInt8](repeating: 0, count: 4) + let bytesRead = recv(fd, &lengthBytes, 4, MSG_WAITALL) + guard bytesRead == 4 else { break } + + let messageLength = Int(UInt32(lengthBytes[0]) + | (UInt32(lengthBytes[1]) << 8) + | (UInt32(lengthBytes[2]) << 16) + | (UInt32(lengthBytes[3]) << 24)) + + guard messageLength > 0, messageLength < 10_000_000 else { break } // 10MB safety limit + + // Read message body + var messageData = Data(count: messageLength) + let bodyRead = messageData.withUnsafeMutableBytes { ptr in + recv(fd, ptr.baseAddress!, messageLength, MSG_WAITALL) + } + guard bodyRead == messageLength else { break } + + // Parse JSON + guard let json = try? JSONSerialization.jsonObject(with: messageData) as? [String: Any] else { + sendResponse(fd: fd, response: ["error": "Invalid JSON"]) + continue + } + + onConnectionActivity?() + + // Handle message with the peer's TTY identity + let response = messageHandler?(json, ttyId) ?? ["error": "No handler"] + sendResponse(fd: fd, id: json["id"] as? String, response: response) + } + } + + private func sendResponse(fd: Int32, id: String? 
= nil, response: [String: Any]) { + var fullResponse = response + if let id = id { + fullResponse["id"] = id + } + + guard let jsonData = try? JSONSerialization.data(withJSONObject: fullResponse) else { + return + } + + // Write length prefix (4 bytes, little-endian) + var length = UInt32(jsonData.count).littleEndian + _ = withUnsafeBytes(of: &length) { ptr in + send(fd, ptr.baseAddress!, 4, 0) + } + + // Write message body + jsonData.withUnsafeBytes { ptr in + _ = send(fd, ptr.baseAddress!, jsonData.count, 0) + } + } +} + +// MARK: - Errors + +enum IPCError: LocalizedError { + case socketCreationFailed(String) + case bindFailed(String) + case listenFailed(String) + + var errorDescription: String? { + switch self { + case .socketCreationFailed(let msg): return "Socket creation failed: \(msg)" + case .bindFailed(let msg): return "Socket bind failed: \(msg)" + case .listenFailed(let msg): return "Socket listen failed: \(msg)" + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/PeerIdentity.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/PeerIdentity.swift new file mode 100644 index 00000000..259a1e06 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/PeerIdentity.swift @@ -0,0 +1,64 @@ +import Foundation +import Darwin + +// LOCAL_PEERPID may not be exported by Swift's Darwin module +private let LOCAL_PEERPID: Int32 = 0x002 + +/// Get the PID of the peer connected to a Unix domain socket. +func getPeerPid(fd: Int32) -> pid_t? { + var pid: pid_t = 0 + var pidSize = socklen_t(MemoryLayout.size) + let result = getsockopt(fd, SOL_LOCAL, LOCAL_PEERPID, &pid, &pidSize) + guard result == 0, pid > 0 else { return nil } + return pid +} + +/// Get process info via sysctl KERN_PROC. +private func getProcessInfo(pid: pid_t) -> kinfo_proc? 
{ + var mib: [Int32] = [CTL_KERN, KERN_PROC, KERN_PROC_PID, pid] + var info = kinfo_proc() + var size = MemoryLayout.size + + let result = sysctl(&mib, UInt32(mib.count), &info, &size, nil, 0) + guard result == 0 else { return nil } + return info +} + +/// Get a stable TTY identifier for a process. +/// +/// Combines the TTY device name with the session leader's start time. +/// The session leader is the shell process that owns the TTY (its PID equals +/// the session ID). Using its start time prevents TTY device reuse attacks +/// (where a new terminal is allocated the same /dev/ttysNNN after the old one closed). +/// +/// Returns nil if the process has no controlling TTY (detached, CI, etc). +func getTtyIdentifier(forPid pid: pid_t) -> String? { + guard let info = getProcessInfo(pid: pid) else { return nil } + + let ttyDev = info.kp_eproc.e_tdev + // NODEV (0xFFFFFFFF) or 0 means no controlling tty + guard ttyDev != UInt32.max, ttyDev != 0 else { return nil } + + // Convert device number to name (e.g., "ttys003") + guard let namePtr = devname(dev_t(ttyDev), S_IFCHR) else { return nil } + let ttyName = String(cString: namePtr) + + // Get the session leader's start time for uniqueness. + // getsid() returns the session leader PID (the shell that owns the TTY), + // which is stable across all processes launched from the same terminal. + // (e_tpgid is the *foreground process group*, which changes on every command.) 
+ let sessionLeaderPid = getsid(pid) + var startTimestamp: Int = 0 + + if sessionLeaderPid > 0, let leaderInfo = getProcessInfo(pid: sessionLeaderPid) { + startTimestamp = Int(leaderInfo.kp_proc.p_starttime.tv_sec) + } + + // If we couldn't get the session leader start time, fall back to the + // connecting process's own start time (less ideal but still unique per session) + if startTimestamp == 0 { + startTimestamp = Int(info.kp_proc.p_starttime.tv_sec) + } + + return "\(ttyName):\(startTimestamp)" +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureEnclaveManager.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureEnclaveManager.swift new file mode 100644 index 00000000..f86d8203 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureEnclaveManager.swift @@ -0,0 +1,304 @@ +import Foundation +import Security +import LocalAuthentication +import CryptoKit + +/// Manages Secure Enclave key operations and ECIES encrypt/decrypt. +/// +/// Uses CryptoKit's SecureEnclave.P256 API. Key "data representations" (opaque handles +/// to the SE key, NOT the private key itself) are stored as files on disk. +/// This avoids Keychain entitlement requirements that plague CLI tools. +/// +/// Crypto scheme: +/// - P-256 key stored in Secure Enclave with biometric access control +/// - ECIES: ephemeral P-256 key pair → ECDH → HKDF-SHA256 → AES-256-GCM +/// - Payload: version(1) | ephemeralPubKey(65) | nonce(12) | ciphertext(N) | tag(16) +final class SecureEnclaveManager { + static let payloadVersion: UInt8 = 0x01 + + /// Directory where key data representations are stored + static var keyStorePath: String { + let xdg = ProcessInfo.processInfo.environment["XDG_CONFIG_HOME"] + let base = xdg ?? 
(NSHomeDirectory() + "/.config") + return base + "/varlock/secure-enclave/keys" + } + + private static func keyFilePath(for keyId: String) -> String { + return keyStorePath + "/\(keyId).keydata" + } + + // MARK: - Key Management + + /// Create a new Secure Enclave P-256 key. + /// + /// By default, requires user presence (Touch ID, Apple Watch, or device password). + /// Pass `requireAuth: false` for CI/testing — key is still SE-backed but no biometric. + /// Saves the key data representation to disk and returns the public key. + static func generateKey(keyId: String, context: LAContext? = nil, requireAuth: Bool = true) throws -> Data { + // Create access control — with or without user presence requirement + var accessError: Unmanaged? + let flags: SecAccessControlCreateFlags = requireAuth + ? [.privateKeyUsage, .userPresence] + : [.privateKeyUsage] + guard let accessControl = SecAccessControlCreateWithFlags( + kCFAllocatorDefault, + kSecAttrAccessibleWhenUnlockedThisDeviceOnly, + flags, + &accessError + ) else { + let err = accessError?.takeRetainedValue() + throw EnclaveError.keyGenerationFailed(err?.localizedDescription ?? 
"Failed to create access control") + } + + // Generate the SE key via CryptoKit + let privateKey: SecureEnclave.P256.KeyAgreement.PrivateKey + do { + if let context = context { + privateKey = try SecureEnclave.P256.KeyAgreement.PrivateKey( + accessControl: accessControl, + authenticationContext: context + ) + } else { + privateKey = try SecureEnclave.P256.KeyAgreement.PrivateKey( + accessControl: accessControl + ) + } + } catch { + throw EnclaveError.keyGenerationFailed(error.localizedDescription) + } + + // Save the data representation (an opaque handle, NOT the private key) + let dataRepresentation = privateKey.dataRepresentation + let filePath = keyFilePath(for: keyId) + let dir = (filePath as NSString).deletingLastPathComponent + try FileManager.default.createDirectory(atPath: dir, withIntermediateDirectories: true) + try dataRepresentation.write(to: URL(fileURLWithPath: filePath)) + + // Set file permissions to owner-only + try FileManager.default.setAttributes( + [.posixPermissions: 0o600], + ofItemAtPath: filePath + ) + + return Data(privateKey.publicKey.x963Representation) + } + + /// Delete a key by removing its data representation file. + static func deleteKey(keyId: String) -> Bool { + let filePath = keyFilePath(for: keyId) + do { + try FileManager.default.removeItem(atPath: filePath) + return true + } catch { + return false + } + } + + /// List key IDs by scanning the key store directory. + static func listKeys() -> [String] { + let dir = keyStorePath + guard let files = try? FileManager.default.contentsOfDirectory(atPath: dir) else { + return [] + } + return files + .filter { $0.hasSuffix(".keydata") } + .map { String($0.dropLast(".keydata".count)) } + } + + /// Check if a key exists. + static func keyExists(keyId: String) -> Bool { + return FileManager.default.fileExists(atPath: keyFilePath(for: keyId)) + } + + // MARK: - Key Loading + + /// Load a Secure Enclave private key from its stored data representation. 
+ private static func loadPrivateKey(keyId: String, context: LAContext?) throws -> SecureEnclave.P256.KeyAgreement.PrivateKey { + let filePath = keyFilePath(for: keyId) + guard let data = FileManager.default.contents(atPath: filePath) else { + throw EnclaveError.keyNotFound(keyId) + } + + do { + if let context = context { + return try SecureEnclave.P256.KeyAgreement.PrivateKey( + dataRepresentation: data, + authenticationContext: context + ) + } else { + return try SecureEnclave.P256.KeyAgreement.PrivateKey( + dataRepresentation: data + ) + } + } catch { + throw EnclaveError.keyNotFound("\(keyId) - \(error.localizedDescription)") + } + } + + // MARK: - ECIES Encrypt + + /// Encrypt plaintext using ECIES with the Secure Enclave key. + /// + /// Only needs the public key, so no biometric auth required for encryption. + /// Steps: + /// 1. Load SE key to get public key + /// 2. Generate ephemeral P-256 key pair + /// 3. ECDH: ephemeral private × SE public → shared secret + /// 4. HKDF-SHA256 derive AES-256-GCM key + /// 5. AES-256-GCM encrypt + /// 6. 
Return: version | ephemeralPub | nonce | ciphertext | tag + static func encrypt(plaintext: Data, keyId: String) throws -> Data { + let seKey = try loadPrivateKey(keyId: keyId, context: nil) + let sePublicKey = seKey.publicKey + let pubKeyData = Data(sePublicKey.x963Representation) + + // Generate ephemeral key pair (in software, not SE) + let ephemeralPrivateKey = P256.KeyAgreement.PrivateKey() + let ephemeralPublicKeyData = Data(ephemeralPrivateKey.publicKey.x963Representation) // 65 bytes + + // ECDH: ephemeral private × SE public + let sharedSecret = try ephemeralPrivateKey.sharedSecretFromKeyAgreement(with: sePublicKey) + + // Extract raw shared secret bytes for HKDF + let sharedSecretData = sharedSecret.withUnsafeBytes { Data($0) } + + // HKDF derive AES-256 key (using manual HKDF to match decrypt path) + let symmetricKey = SecureEnclaveManager.deriveKey( + sharedSecret: sharedSecretData, + salt: Data("varlock-ecies-v1".utf8), + info: ephemeralPublicKeyData + pubKeyData, + outputByteCount: 32 + ) + + // AES-256-GCM encrypt + let sealedBox = try AES.GCM.seal(plaintext, using: symmetricKey) + + // Assemble payload: version(1) | ephemeralPub(65) | nonce(12) | ciphertext(N) | tag(16) + var payload = Data() + payload.append(SecureEnclaveManager.payloadVersion) + payload.append(ephemeralPublicKeyData) // 65 bytes + payload.append(contentsOf: sealedBox.nonce) // 12 bytes + payload.append(sealedBox.ciphertext) // N bytes + payload.append(sealedBox.tag) // 16 bytes + + return payload + } + + // MARK: - ECIES Decrypt + + /// Decrypt ciphertext using ECIES with the Secure Enclave key. + /// Uses the provided LAContext for biometric session caching. + /// + /// Steps: + /// 1. Parse payload components + /// 2. Load SE private key with LAContext (uses cached biometric) + /// 3. ECDH: SE private × ephemeral public → shared secret + /// 4. HKDF-SHA256 derive AES-256-GCM key + /// 5. AES-256-GCM decrypt + static func decrypt(payload: Data, keyId: String, context: LAContext?) 
throws -> Data { + // Parse payload + guard payload.count > 1 + 65 + 12 + 16 else { + throw EnclaveError.decryptionFailed("Payload too short") + } + + let version = payload[0] + guard version == SecureEnclaveManager.payloadVersion else { + throw EnclaveError.decryptionFailed("Unsupported payload version: \(version)") + } + + let ephemeralPubKeyData = payload[1..<66] // 65 bytes + let nonce = payload[66..<78] // 12 bytes + let ciphertextAndTag = payload[78...] + guard ciphertextAndTag.count >= 16 else { + throw EnclaveError.decryptionFailed("Payload too short for tag") + } + let ciphertext = ciphertextAndTag.dropLast(16) + let tag = ciphertextAndTag.suffix(16) + + // Load SE private key with LAContext for cached biometric session + let seKey = try loadPrivateKey(keyId: keyId, context: context) + let pubKeyData = Data(seKey.publicKey.x963Representation) + + // Reconstruct ephemeral public key + let ephemeralPublicKey = try P256.KeyAgreement.PublicKey(x963Representation: ephemeralPubKeyData) + + // ECDH: SE private × ephemeral public + // CryptoKit's SecureEnclave key performs the ECDH inside the SE + let sharedSecret = try seKey.sharedSecretFromKeyAgreement(with: ephemeralPublicKey) + let sharedSecretData = sharedSecret.withUnsafeBytes { Data($0) } + + // Derive symmetric key using HKDF (must match encrypt side) + let symmetricKey = SecureEnclaveManager.deriveKey( + sharedSecret: sharedSecretData, + salt: Data("varlock-ecies-v1".utf8), + info: Data(ephemeralPubKeyData) + pubKeyData, + outputByteCount: 32 + ) + + // AES-256-GCM decrypt + let gcmNonce = try AES.GCM.Nonce(data: nonce) + let sealedBox = try AES.GCM.SealedBox(nonce: gcmNonce, ciphertext: ciphertext, tag: tag) + let decrypted = try AES.GCM.open(sealedBox, using: symmetricKey) + + return decrypted + } +} + +// MARK: - HKDF + +// We implement HKDF manually so both encrypt and decrypt paths are consistent. 
+// On the encrypt side CryptoKit's built-in HKDF over SharedSecret would work, and +// the decrypt side's sharedSecretFromKeyAgreement also yields a SharedSecret, so +// the built-in API could in principle serve both paths. The manual version is kept +// for explicitness and in case we ever need raw SecKey ECDH. +extension SecureEnclaveManager { + /// HKDF-SHA256 key derivation from raw shared secret bytes. + static func deriveKey( + sharedSecret: Data, + salt: Data, + info: Data, + outputByteCount: Int + ) -> SymmetricKey { + // HKDF-Extract + let prk = HMAC.authenticationCode(for: sharedSecret, using: SymmetricKey(data: salt)) + let prkData = Data(prk) + + // HKDF-Expand + var okm = Data() + var t = Data() + var counter: UInt8 = 1 + + while okm.count < outputByteCount { + var input = t + input.append(info) + input.append(counter) + t = Data(HMAC.authenticationCode(for: input, using: SymmetricKey(data: prkData))) + okm.append(t) + counter += 1 + } + + return SymmetricKey(data: okm.prefix(outputByteCount)) + } +} + +// MARK: - Error Types + +enum EnclaveError: LocalizedError { + case keyGenerationFailed(String) + case keyNotFound(String) + case encryptionFailed(String) + case decryptionFailed(String) + case biometricFailed(String) + case notSupported(String) + + var errorDescription: String? 
{ + switch self { + case .keyGenerationFailed(let msg): return "Key generation failed: \(msg)" + case .keyNotFound(let keyId): return "Key not found: \(keyId)" + case .encryptionFailed(let msg): return "Encryption failed: \(msg)" + case .decryptionFailed(let msg): return "Decryption failed: \(msg)" + case .biometricFailed(let msg): return "Biometric authentication failed: \(msg)" + case .notSupported(let msg): return "Not supported: \(msg)" + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureInputDialog.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureInputDialog.swift new file mode 100644 index 00000000..c3ba4387 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SecureInputDialog.swift @@ -0,0 +1,74 @@ +import AppKit + +/// Shows a native macOS dialog with a secure text field for entering secrets. +/// Runs on the main thread and blocks until the user submits or cancels. +final class SecureInputDialog { + /// Show a secure input dialog and return the entered text, or nil if cancelled. + static func prompt(title: String, message: String, itemKey: String?) -> String? { + var result: String? + let work = { + // Ensure the app has an Edit menu so Cmd+V/C/X/A work in text fields. + // NSAlert doesn't create one, so keyboard shortcuts are dead without this. + ensureEditMenu() + + let alert = NSAlert() + alert.messageText = title + alert.informativeText = message + alert.alertStyle = .informational + alert.addButton(withTitle: "Encrypt") + alert.addButton(withTitle: "Cancel") + + let inputField = NSSecureTextField(frame: NSRect(x: 0, y: 0, width: 300, height: 24)) + inputField.placeholderString = "Enter or paste secret value..." + alert.accessoryView = inputField + + // Set the window title to include the item key for context + let appName = Bundle.main.object(forInfoDictionaryKey: "CFBundleDisplayName") as? String ?? 
"Varlock" + alert.window.title = itemKey.map { "\(appName) — \($0)" } ?? appName + + // Bring the app to front so the dialog is visible + NSApp.activate(ignoringOtherApps: true) + + // Make the input field the first responder after the alert is shown + alert.window.initialFirstResponder = inputField + + let response = alert.runModal() + if response == .alertFirstButtonReturn { + let value = inputField.stringValue + if !value.isEmpty { + result = value + } + } + } + + if Thread.isMainThread { + work() + } else { + DispatchQueue.main.sync { work() } + } + + return result + } + + /// Create a minimal Edit menu so standard keyboard shortcuts work. + /// Safe to call multiple times — only creates the menu once. + private static var editMenuInstalled = false + private static func ensureEditMenu() { + guard !editMenuInstalled else { return } + editMenuInstalled = true + + let mainMenu = NSApp.mainMenu ?? NSMenu() + + let editMenuItem = NSMenuItem(title: "Edit", action: nil, keyEquivalent: "") + let editMenu = NSMenu(title: "Edit") + + editMenu.addItem(withTitle: "Cut", action: #selector(NSText.cut(_:)), keyEquivalent: "x") + editMenu.addItem(withTitle: "Copy", action: #selector(NSText.copy(_:)), keyEquivalent: "c") + editMenu.addItem(withTitle: "Paste", action: #selector(NSText.paste(_:)), keyEquivalent: "v") + editMenu.addItem(withTitle: "Select All", action: #selector(NSText.selectAll(_:)), keyEquivalent: "a") + + editMenuItem.submenu = editMenu + mainMenu.addItem(editMenuItem) + NSApp.mainMenu = mainMenu + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SessionManager.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SessionManager.swift new file mode 100644 index 00000000..9c2d7e39 --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/SessionManager.swift @@ -0,0 +1,186 @@ +import Foundation +import LocalAuthentication +import AppKit + +/// Manages biometric authentication sessions for the 
daemon, scoped per-TTY. +/// +/// Each terminal must independently authenticate via Touch ID. This prevents +/// rogue processes in other terminals from piggybacking on an existing session. +/// +/// Biometric reuse timeout is handled by macOS via `touchIDAuthenticationAllowableReuseDuration`. +/// This manager handles per-TTY scoping, explicit invalidation (lock command), +/// and system events (sleep, screen lock). +final class SessionManager { + /// How long Touch ID stays unlocked per terminal before re-prompting (seconds). + /// Passed to macOS via `touchIDAuthenticationAllowableReuseDuration`. + static let sessionTimeout: TimeInterval = 300 // 5 minutes + + /// How long the daemon stays alive with no connections at all + static let daemonInactivityTimeout: TimeInterval = 1800 // 30 minutes + + /// Fallback key for processes without a controlling terminal + static let noTtyFallback = "__no_tty__" + + /// Per-TTY cached LAContext (macOS owns the timeout via reuse duration) + private var contexts: [String: LAContext] = [:] + private let queue = DispatchQueue(label: "dev.varlock.session") + + /// Called when the daemon should shut down due to inactivity + var onDaemonTimeout: (() -> Void)? + + private var daemonTimer: DispatchSourceTimer? + + init() { + setupSystemNotifications() + resetDaemonTimer() + } + + deinit { + daemonTimer?.cancel() + } + + // MARK: - Public API + + /// Get or create an authenticated LAContext for the given TTY. + /// On first call per TTY, triggers Touch ID. Subsequent calls within the + /// reuse duration return the cached context without re-prompting. + func getAuthenticatedContext(ttyId: String?) throws -> LAContext { + let key = ttyId ?? 
SessionManager.noTtyFallback + + return try queue.sync { + // Return cached context if available — macOS handles expiry + // via touchIDAuthenticationAllowableReuseDuration + if let context = contexts[key] { + resetDaemonTimer() + return context + } + + // Need fresh auth for this TTY + let context = LAContext() + context.touchIDAuthenticationAllowableReuseDuration = SessionManager.sessionTimeout + + // Use deviceOwnerAuthentication which accepts Touch ID, Apple Watch, + // or device password — works on machines without biometrics and + // supports the "Use Password" fallback in the Touch ID dialog. + var authError: NSError? + guard context.canEvaluatePolicy(.deviceOwnerAuthentication, error: &authError) else { + let msg = authError?.localizedDescription ?? "Authentication not available" + throw EnclaveError.biometricFailed(msg) + } + + // Synchronous authentication evaluation + let semaphore = DispatchSemaphore(value: 0) + var evalError: Error? + + context.evaluatePolicy( + .deviceOwnerAuthentication, + localizedReason: "decrypt your secrets" + ) { success, error in + if !success { + evalError = error + } + semaphore.signal() + } + + semaphore.wait() + + if let error = evalError { + throw EnclaveError.biometricFailed(error.localizedDescription) + } + + // Cache the authenticated context for this TTY + contexts[key] = context + resetDaemonTimer() + + return context + } + } + + /// Invalidate all TTY sessions (used by lock command, sleep/lock events). + func invalidateAllSessions() { + queue.sync { + for (_, context) in contexts { + context.invalidate() + } + contexts.removeAll() + } + } + + /// Resets the daemon shutdown timer (no Touch ID). Call for any IPC so the + /// process stays up while clients use ping, encrypt, etc., not only decrypt. + func noteIpcActivity() { + queue.async { [weak self] in + self?.resetDaemonTimer() + } + } + + /// Whether the given TTY has a cached session. 
+ /// Note: the session may still re-prompt if macOS's reuse duration has expired. + func isSessionWarm(ttyId: String?) -> Bool { + let key = ttyId ?? SessionManager.noTtyFallback + return queue.sync { + return contexts[key] != nil + } + } + + /// Whether any TTY has a cached session. + func hasAnySessions() -> Bool { + return queue.sync { + return !contexts.isEmpty + } + } + + // MARK: - Private + + private func resetDaemonTimer() { + daemonTimer?.cancel() + let timer = DispatchSource.makeTimerSource(queue: queue) + timer.schedule(deadline: .now() + SessionManager.daemonInactivityTimeout) + timer.setEventHandler { [weak self] in + self?.onDaemonTimeout?() + } + timer.resume() + daemonTimer = timer + } + + // MARK: - System Notifications + + private func setupSystemNotifications() { + let workspace = NSWorkspace.shared + let notificationCenter = workspace.notificationCenter + + // Screen lock / sleep → invalidate ALL sessions + notificationCenter.addObserver( + forName: NSWorkspace.willSleepNotification, + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + + notificationCenter.addObserver( + forName: NSWorkspace.sessionDidResignActiveNotification, + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + + notificationCenter.addObserver( + forName: NSWorkspace.screensDidSleepNotification, + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + + // Also invalidate when screens lock (available on macOS 13+) + DistributedNotificationCenter.default().addObserver( + forName: NSNotification.Name("com.apple.screenIsLocked"), + object: nil, + queue: .main + ) { [weak self] _ in + self?.invalidateAllSessions() + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/StatusBarMenu.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/StatusBarMenu.swift new file mode 100644 index 00000000..0078ab94 --- /dev/null +++ 
b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/StatusBarMenu.swift @@ -0,0 +1,106 @@ +import AppKit + +/// Manages the macOS menu bar status item for the Varlock Enclave daemon. +final class StatusBarMenu: NSObject, NSMenuDelegate { + private var statusItem: NSStatusItem? + private let menu = NSMenu() + private let sessionManager: SessionManager + private let onLock: () -> Void + private let onQuit: () -> Void + + init( + sessionManager: SessionManager, + onLock: @escaping () -> Void, + onQuit: @escaping () -> Void + ) { + self.sessionManager = sessionManager + self.onLock = onLock + self.onQuit = onQuit + super.init() + setupStatusItem() + } + + private func setupStatusItem() { + statusItem = NSStatusBar.system.statusItem(withLength: NSStatusItem.squareLength) + + guard let button = statusItem?.button else { return } + updateIcon() + let menuTitle = Bundle.main.object(forInfoDictionaryKey: "VarlockMenuTitle") as? String ?? "Varlock Secure Enclave" + button.toolTip = menuTitle + + menu.delegate = self + statusItem?.menu = menu + rebuildMenuItems() + } + + private func updateIcon() { + guard let button = statusItem?.button else { return } + let hasActiveSessions = sessionManager.hasAnySessions() + button.image = nil + button.title = hasActiveSessions ? "🔓" : "🔒" + } + + // NSMenuDelegate — update items and icon each time the menu opens + func menuWillOpen(_ menu: NSMenu) { + updateIcon() + rebuildMenuItems() + } + + private func rebuildMenuItems() { + menu.removeAllItems() + + // Header + let menuTitle = Bundle.main.object(forInfoDictionaryKey: "VarlockMenuTitle") as? String ?? 
"Varlock Secure Enclave" + let headerItem = NSMenuItem(title: menuTitle, action: nil, keyEquivalent: "") + headerItem.isEnabled = false + menu.addItem(headerItem) + + menu.addItem(NSMenuItem.separator()) + + // Lock action — disabled with status text when already locked + let hasActiveSessions = sessionManager.hasAnySessions() + if hasActiveSessions { + let lockItem = NSMenuItem(title: "Lock", action: #selector(lockClicked), keyEquivalent: "l") + lockItem.target = self + menu.addItem(lockItem) + } else { + let lockedItem = NSMenuItem(title: "Locked", action: nil, keyEquivalent: "") + lockedItem.isEnabled = false + menu.addItem(lockedItem) + } + + menu.addItem(NSMenuItem.separator()) + + // Quit + let quitItem = NSMenuItem(title: "Quit Daemon", action: #selector(quitClicked), keyEquivalent: "q") + quitItem.target = self + menu.addItem(quitItem) + } + + @objc private func lockClicked() { + onLock() + updateIcon() + } + + @objc private func quitClicked() { + onQuit() + } + + /// Call from any thread after a session state change to update the icon + func refresh() { + // Use performSelector to ensure the update runs in the next run loop iteration + // on the main thread — more reliable than DispatchQueue.main.async with NSApplication + performSelector(onMainThread: #selector(doRefresh), with: nil, waitUntilDone: false) + } + + @objc private func doRefresh() { + updateIcon() + } + + func remove() { + if let item = statusItem { + NSStatusBar.system.removeStatusItem(item) + statusItem = nil + } + } +} diff --git a/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/main.swift b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/main.swift new file mode 100644 index 00000000..6cd3297f --- /dev/null +++ b/packages/encryption-binary-swift/swift/Sources/VarlockEnclave/main.swift @@ -0,0 +1,350 @@ +import Foundation +import AppKit + +// MARK: - JSON Output Helpers + +func jsonOutput(_ dict: [String: Any]) { + guard let data = try? 
JSONSerialization.data(withJSONObject: dict), + let str = String(data: data, encoding: .utf8) else { + fputs("{\"error\":\"Failed to serialize output\"}\n", stderr) + exit(1) + } + print(str) +} + +func jsonError(_ message: String) -> Never { + jsonOutput(["error": message]) + exit(1) +} + +func jsonSuccess(_ result: [String: Any]) -> Never { + jsonOutput(["ok": true].merging(result) { _, new in new }) + exit(0) +} + +// MARK: - CLI Parsing + +let args = CommandLine.arguments +let command = args.count > 1 ? args[1] : "help" + +func getArg(_ flag: String) -> String? { + guard let idx = args.firstIndex(of: flag), idx + 1 < args.count else { return nil } + return args[idx + 1] +} + +let defaultKeyId = "varlock-default" +let noAuth = args.contains("--no-auth") // CI mode: skip biometric requirement + +switch command { + +// MARK: - generate-key + +case "generate-key": + let keyId = getArg("--key-id") ?? defaultKeyId + + do { + let pubKeyData = try SecureEnclaveManager.generateKey(keyId: keyId, requireAuth: !noAuth) + jsonSuccess([ + "keyId": keyId, + "publicKey": pubKeyData.base64EncodedString(), + "publicKeyBytes": pubKeyData.count, + ]) + } catch { + jsonError(error.localizedDescription) + } + +// MARK: - delete-key + +case "delete-key": + let keyId = getArg("--key-id") ?? defaultKeyId + let deleted = SecureEnclaveManager.deleteKey(keyId: keyId) + jsonSuccess(["keyId": keyId, "deleted": deleted]) + +// MARK: - list-keys + +case "list-keys": + let keys = SecureEnclaveManager.listKeys() + jsonSuccess(["keys": keys]) + +// MARK: - key-exists + +case "key-exists": + let keyId = getArg("--key-id") ?? defaultKeyId + let exists = SecureEnclaveManager.keyExists(keyId: keyId) + jsonSuccess(["keyId": keyId, "exists": exists]) + +// MARK: - encrypt + +case "encrypt": + let keyId = getArg("--key-id") ?? 
defaultKeyId + + guard let dataB64 = getArg("--data") else { + jsonError("Missing --data argument (base64-encoded plaintext)") + } + guard let plaintext = Data(base64Encoded: dataB64) else { + jsonError("Invalid base64 data") + } + + do { + let encrypted = try SecureEnclaveManager.encrypt(plaintext: plaintext, keyId: keyId) + jsonSuccess(["ciphertext": encrypted.base64EncodedString()]) + } catch { + jsonError(error.localizedDescription) + } + +// MARK: - decrypt (one-shot, for testing) + +case "decrypt": + let keyId = getArg("--key-id") ?? defaultKeyId + + guard let dataB64 = getArg("--data") else { + jsonError("Missing --data argument (base64-encoded ciphertext)") + } + guard let ciphertext = Data(base64Encoded: dataB64) else { + jsonError("Invalid base64 data") + } + + do { + let decrypted = try SecureEnclaveManager.decrypt(payload: ciphertext, keyId: keyId, context: nil) + guard let plaintext = String(data: decrypted, encoding: .utf8) else { + jsonError("Decrypted data is not valid UTF-8") + } + jsonSuccess(["plaintext": plaintext]) + } catch { + jsonError(error.localizedDescription) + } + +// MARK: - status + +case "status": + let seAvailable: Bool + #if targetEnvironment(simulator) + seAvailable = false + #else + seAvailable = true // If this binary runs on real hardware, SE is available + #endif + + jsonSuccess([ + "secureEnclaveAvailable": seAvailable, + "platform": "darwin", + "arch": { + #if arch(arm64) + return "arm64" + #elseif arch(x86_64) + return "x86_64" + #else + return "unknown" + #endif + }(), + "keys": SecureEnclaveManager.listKeys(), + ]) + +// MARK: - daemon + +case "daemon": + guard let socketPath = getArg("--socket-path") else { + jsonError("Missing --socket-path argument") + } + + let sessionManager = SessionManager() + let server = IPCServer(socketPath: socketPath) + + // Write PID file + let pidPath = getArg("--pid-path") + if let pidPath = pidPath { + let pidDir = (pidPath as NSString).deletingLastPathComponent + try? 
FileManager.default.createDirectory(atPath: pidDir, withIntermediateDirectories: true) + try? "\(ProcessInfo.processInfo.processIdentifier)".write(toFile: pidPath, atomically: true, encoding: .utf8) + } + + // Status bar menu (must be created before run loop starts) + // NSApplication is needed for status bar items to work + let app = NSApplication.shared + app.setActivationPolicy(.accessory) // no Dock icon + + var statusBarMenu: StatusBarMenu? + + // Handle daemon shutdown + func shutdownDaemon() { + statusBarMenu?.remove() + server.stop() + if let pidPath = pidPath { + try? FileManager.default.removeItem(atPath: pidPath) + } + exit(0) + } + + sessionManager.onDaemonTimeout = { + shutdownDaemon() + } + + server.onConnectionActivity = { + sessionManager.noteIpcActivity() + } + + // Handle IPC messages (ttyId is resolved from the peer's controlling terminal) + server.messageHandler = { message, ttyId in + guard let action = message["action"] as? String else { + return ["error": "Missing action"] + } + + switch action { + case "decrypt": + guard let payload = message["payload"] as? [String: Any], + let ciphertextB64 = payload["ciphertext"] as? String, + let ciphertext = Data(base64Encoded: ciphertextB64) else { + return ["error": "Missing or invalid ciphertext in payload"] + } + + let keyId = (payload["keyId"] as? String) ?? 
defaultKeyId + + do { + let context = try sessionManager.getAuthenticatedContext(ttyId: ttyId) + let decrypted = try SecureEnclaveManager.decrypt( + payload: ciphertext, + keyId: keyId, + context: context + ) + guard let plaintext = String(data: decrypted, encoding: .utf8) else { + return ["error": "Decrypted data is not valid UTF-8"] + } + statusBarMenu?.refresh() + return ["result": plaintext] + } catch { + return ["error": error.localizedDescription] + } + + case "ping": + return [ + "result": [ + "pong": true, + "sessionWarm": sessionManager.isSessionWarm(ttyId: ttyId), + "ttyId": ttyId as Any, + ], + ] + + case "encrypt": + guard let payload = message["payload"] as? [String: Any], + let plaintextStr = payload["plaintext"] as? String else { + return ["error": "Missing plaintext in payload"] + } + + let encKeyId = (payload["keyId"] as? String) ?? defaultKeyId + guard let plaintextData = plaintextStr.data(using: .utf8) else { + return ["error": "Plaintext is not valid UTF-8"] + } + + do { + let encrypted = try SecureEnclaveManager.encrypt(plaintext: plaintextData, keyId: encKeyId) + return ["result": encrypted.base64EncodedString()] + } catch { + return ["error": error.localizedDescription] + } + + case "prompt-secret": + let promptPayload = message["payload"] as? [String: Any] + let itemKey = promptPayload?["itemKey"] as? String + let promptMessage = promptPayload?["message"] as? String + ?? "Enter the secret value to encrypt:" + + guard let value = SecureInputDialog.prompt( + title: "Varlock — Enter Secret", + message: promptMessage, + itemKey: itemKey + ) else { + return ["error": "cancelled"] + } + + // Encrypt the entered value immediately + let promptKeyId = (promptPayload?["keyId"] as? String) ?? 
defaultKeyId + guard let valueData = value.data(using: .utf8) else { + return ["error": "Value is not valid UTF-8"] + } + + do { + let encrypted = try SecureEnclaveManager.encrypt(plaintext: valueData, keyId: promptKeyId) + return ["result": [ + "ciphertext": encrypted.base64EncodedString(), + ]] + } catch { + return ["error": error.localizedDescription] + } + + case "invalidate-session": + sessionManager.invalidateAllSessions() + statusBarMenu?.refresh() + return ["result": "all sessions invalidated"] + + default: + return ["error": "Unknown action: \(action)"] + } + } + + // Start server + do { + try server.start() + + // Print ready message to stdout so the JS launcher knows we're ready + jsonOutput(["ready": true, "pid": ProcessInfo.processInfo.processIdentifier, "socketPath": socketPath]) + fflush(stdout) + + // Set up status bar menu + statusBarMenu = StatusBarMenu( + sessionManager: sessionManager, + onLock: { + sessionManager.invalidateAllSessions() + statusBarMenu?.refresh() + }, + onQuit: { + shutdownDaemon() + } + ) + + // We need a run loop for NSWorkspace notifications (sleep/lock detection) + // and for the status bar menu to work + signal(SIGTERM, SIG_IGN) + signal(SIGINT, SIG_IGN) + + let sigTermSource = DispatchSource.makeSignalSource(signal: SIGTERM, queue: .main) + sigTermSource.setEventHandler { shutdownDaemon() } + sigTermSource.resume() + + let sigIntSource = DispatchSource.makeSignalSource(signal: SIGINT, queue: .main) + sigIntSource.setEventHandler { shutdownDaemon() } + sigIntSource.resume() + + app.run() + } catch { + jsonError("Failed to start daemon: \(error.localizedDescription)") + } + +// MARK: - help + +case "help", "--help", "-h": + let help = """ + varlock-enclave - Secure Enclave encryption daemon for Varlock + + COMMANDS: + generate-key [--key-id ] Create a new Secure Enclave key + delete-key [--key-id ] Delete a Secure Enclave key + list-keys List all Varlock Secure Enclave keys + key-exists [--key-id ] Check if a key exists + 
encrypt --data [--key-id ] Encrypt data (one-shot) + decrypt --data [--key-id ] Decrypt data (one-shot, testing) + status Check Secure Enclave availability + daemon --socket-path [--pid-path ] Start IPC daemon + + OPTIONS: + --key-id Key identifier (default: varlock-default) + --data Base64-encoded data + --socket-path Unix socket path for daemon mode + --pid-path PID file path for daemon mode + + All output is JSON. Errors return {"error": "message"}. + """ + print(help) + exit(0) + +default: + jsonError("Unknown command: \(command). Run with --help for usage.") +} diff --git a/packages/varlock/.gitignore b/packages/varlock/.gitignore index 5b1113d9..e6f1c68f 100644 --- a/packages/varlock/.gitignore +++ b/packages/varlock/.gitignore @@ -1,2 +1,3 @@ dist -dist-sea \ No newline at end of file +dist-sea +native-bins diff --git a/packages/varlock/package.json b/packages/varlock/package.json index cf0df48a..5fad62d9 100644 --- a/packages/varlock/package.json +++ b/packages/varlock/package.json @@ -44,7 +44,8 @@ ], "files": [ "/bin", - "/dist" + "/dist", + "/native-bins" ], "bin": { "varlock": "./bin/cli.js" diff --git a/packages/varlock/scripts/build-binaries.ts b/packages/varlock/scripts/build-binaries.ts index 669c63e4..9e18248a 100644 --- a/packages/varlock/scripts/build-binaries.ts +++ b/packages/varlock/scripts/build-binaries.ts @@ -1,10 +1,12 @@ import { execSync } from 'node:child_process'; import path from 'node:path'; +import fs from 'node:fs'; import { fileURLToPath } from 'node:url'; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const PKG_DIR = path.resolve(__dirname, '..'); const DIST_DIR = 'dist-sea'; +const NATIVE_BINS_DIR = path.join(PKG_DIR, 'native-bins'); const ENTRY = 'src/cli/cli-executable.ts'; const ALL_TARGETS = [ @@ -67,6 +69,39 @@ if (devMode) { ENTRY, ].join(' ')); + // Bundle platform-specific native binaries alongside the CLI binary + const isMac = archiveName.startsWith('macos-'); + if (isMac) { + const appBundleSrc = 
path.join(NATIVE_BINS_DIR, 'darwin', 'VarlockEnclave.app'); + if (fs.existsSync(appBundleSrc)) { + console.log(' Bundling macOS native binary (VarlockEnclave.app)'); + exec(`cp -R "${appBundleSrc}" "${targetDir}/VarlockEnclave.app"`); + } else { + console.log(` Warning: macOS native binary not found at ${appBundleSrc}, skipping`); + } + } + + // Bundle Rust native binary for Linux/Windows + let nativeBinSubdir: string | null = null; + if (isWin) { + nativeBinSubdir = 'win32-x64'; + } else if (archiveName.startsWith('linux-musl-')) { + nativeBinSubdir = `linux-${archiveName.replace('linux-musl-', '')}`; + } else if (archiveName.startsWith('linux-')) { + nativeBinSubdir = `linux-${archiveName.replace('linux-', '')}`; + } + + if (nativeBinSubdir && !isMac) { + const rustBinaryName = isWin ? 'varlock-local-encrypt.exe' : 'varlock-local-encrypt'; + const rustBinarySrc = path.join(NATIVE_BINS_DIR, nativeBinSubdir, rustBinaryName); + if (fs.existsSync(rustBinarySrc)) { + console.log(` Bundling Rust native binary (${nativeBinSubdir}/${rustBinaryName})`); + exec(`cp "${rustBinarySrc}" "${targetDir}/${rustBinaryName}"`); + } else { + console.log(` Warning: Rust native binary not found at ${rustBinarySrc}, skipping`); + } + } + // Archive let archive: string; let archiveCmd: string; diff --git a/packages/varlock/src/cli/cli-executable.ts b/packages/varlock/src/cli/cli-executable.ts index 4584a4aa..cb0f4b03 100644 --- a/packages/varlock/src/cli/cli-executable.ts +++ b/packages/varlock/src/cli/cli-executable.ts @@ -15,7 +15,8 @@ import { commandSpec as initCommandSpec } from './commands/init.command'; import { commandSpec as loadCommandSpec } from './commands/load.command'; import { commandSpec as runCommandSpec } from './commands/run.command'; import { commandSpec as printenvCommandSpec } from './commands/printenv.command'; -// import { commandSpec as encryptCommandSpec } from './commands/encrypt.command'; +import { commandSpec as encryptCommandSpec } from 
'./commands/encrypt.command'; +import { commandSpec as lockCommandSpec } from './commands/lock.command'; // import { commandSpec as doctorCommandSpec } from './commands/doctor.command'; import { commandSpec as helpCommandSpec } from './commands/help.command'; import { commandSpec as telemetryCommandSpec } from './commands/telemetry.command'; @@ -52,7 +53,8 @@ subCommands.set('init', buildLazyCommand(initCommandSpec, async () => await impo subCommands.set('load', buildLazyCommand(loadCommandSpec, async () => await import('./commands/load.command'))); subCommands.set('run', buildLazyCommand(runCommandSpec, async () => await import('./commands/run.command'))); subCommands.set('printenv', buildLazyCommand(printenvCommandSpec, async () => await import('./commands/printenv.command'))); -// subCommands.set('encrypt', buildLazyCommand(encryptCommandSpec, async () => await import('./commands/encrypt.command'))); +subCommands.set('encrypt', buildLazyCommand(encryptCommandSpec, async () => await import('./commands/encrypt.command'))); +subCommands.set('lock', buildLazyCommand(lockCommandSpec, async () => await import('./commands/lock.command'))); // subCommands.set('doctor', buildLazyCommand(doctorCommandSpec, async () => await import('./commands/doctor.command'))); subCommands.set('explain', buildLazyCommand(explainCommandSpec, async () => await import('./commands/explain.command'))); subCommands.set('help', buildLazyCommand(helpCommandSpec, async () => await import('./commands/help.command'))); diff --git a/packages/varlock/src/cli/commands/encrypt.command.ts b/packages/varlock/src/cli/commands/encrypt.command.ts index ee9d926e..eb724da1 100644 --- a/packages/varlock/src/cli/commands/encrypt.command.ts +++ b/packages/varlock/src/cli/commands/encrypt.command.ts @@ -1,122 +1,167 @@ - import { define } from 'gunshi'; import { isCancel, password } from '@clack/prompts'; - -import { VarlockNativeAppClient } from '../../lib/native-app-client'; +import ansis from 'ansis'; +import 
path from 'node:path'; +import fs from 'node:fs'; + +import { + ParsedEnvSpecStaticValue, + ParsedEnvSpecFunctionCall, +} from '@env-spec/parser'; +import { FileBasedDataSource } from '../../env-graph'; +import { loadVarlockEnvGraph } from '../../lib/load-graph'; import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; +import { CliExitError } from '../helpers/exit-error'; +import { multiselect } from '../helpers/prompts'; import { gracefulExit } from 'exit-hook'; +import * as localEncrypt from '../../lib/local-encrypt'; export const commandSpec = define({ name: 'encrypt', - description: 'Encrypt environment variables in your .env file', - args: {}, + description: 'Encrypt a value using device-local encryption', + args: { + 'key-id': { + type: 'string', + description: 'Encryption key ID (default: varlock-default)', + default: 'varlock-default', + }, + file: { + type: 'string', + description: 'Path to a .env file — encrypts all sensitive plaintext values in-place', + }, + }, }); +async function encryptFile(keyId: string, filePath: string) { + const resolvedPath = path.resolve(filePath); + if (!fs.existsSync(resolvedPath)) { + throw new CliExitError(`File not found: ${resolvedPath}`); + } + + // Load the full env graph and resolve to get sensitivity info from the schema + const envGraph = await loadVarlockEnvGraph(); + await envGraph.resolveEnvValues(); + + // Find the data source matching the target file + const targetSource = envGraph.sortedDataSources.find( + (s) => s instanceof FileBasedDataSource && s.fullPath === resolvedPath, + ) as FileBasedDataSource | undefined; + + if (!targetSource) { + throw new CliExitError( + `File "${filePath}" is not part of the loaded env graph`, + { suggestion: 'Make sure the file is in the project directory or imported by your schema.' 
}, + ); + } + + // Find sensitive items that have plaintext static values in this file + const itemsToEncrypt: Array<{ key: string; value: string }> = []; + + for (const [key, itemDef] of Object.entries(targetSource.configItemDefs)) { + const graphItem = envGraph.configSchema[key]; + if (!graphItem?.isSensitive) continue; + + // Skip items already using varlock() or another function call + if (itemDef.parsedValue instanceof ParsedEnvSpecFunctionCall) continue; + + // Only encrypt items with actual static string values + if (!(itemDef.parsedValue instanceof ParsedEnvSpecStaticValue)) continue; + const val = itemDef.parsedValue.unescapedValue; + if (val === undefined || val === '' || typeof val !== 'string') continue; + + itemsToEncrypt.push({ key, value: val }); + } + + if (itemsToEncrypt.length === 0) { + console.log('No sensitive plaintext values found to encrypt.'); + return; + } + + console.log('Only items marked as @sensitive in the schema are shown.'); + console.log('If a key is missing, add @sensitive to it in your schema file.\n'); + + const selected = await multiselect({ + message: `Confirm values to encrypt in ${filePath} ${ansis.gray('(use arrows, space to toggle, enter to confirm)')}`, + options: itemsToEncrypt.map((item) => ({ + value: item.key, + label: item.key, + })), + initialValues: itemsToEncrypt.map((item) => item.key), + }); + + if (isCancel(selected)) return gracefulExit(); + + const selectedKeys = new Set(selected as Array); + const filteredItems = itemsToEncrypt.filter((item) => selectedKeys.has(item.key)); + + if (filteredItems.length === 0) { + console.log('No items selected.'); + return; + } + + console.log(''); + + // Encrypt each value and write back using string replacement on the raw file. + // We re-read each time since prior replacements modify the file. 
+ let encryptedCount = 0; + for (const item of filteredItems) { + const ciphertext = await localEncrypt.encryptValue(item.value, keyId); + const prefixed = `local:${ciphertext}`; + + const currentContents = fs.readFileSync(resolvedPath, 'utf-8'); + // Match the line for this key and replace the static value with varlock("local:...") + const escaped = item.key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + const pattern = new RegExp(`^(${escaped}\\s*=\\s*).*$`, 'm'); + const updatedContents = currentContents.replace(pattern, `$1varlock("${prefixed}")`); + + if (updatedContents !== currentContents) { + fs.writeFileSync(resolvedPath, updatedContents); + encryptedCount++; + console.log(` Encrypted: ${item.key}`); + } + } + + console.log(`\nEncrypted ${encryptedCount} value${encryptedCount !== 1 ? 's' : ''} in ${filePath}`); +} + export const commandFn: TypedGunshiCommandFn = async (ctx) => { + const keyId = String(ctx.values['key-id'] || 'varlock-default'); + const backend = localEncrypt.getBackendInfo(); + + try { + await localEncrypt.ensureKey(keyId); + } catch (err) { + if (err instanceof CliExitError) throw err; + throw new CliExitError( + `Failed to check/create encryption key: ${err instanceof Error ? err.message : err}`, + ); + } + + console.log(`Using ${backend.type} backend (${backend.hardwareBacked ? 'hardware-backed' : 'file-based'})`); + + const filePath = ctx.values.file; + + // --file mode: encrypt all sensitive plaintext values in a .env file + if (filePath) { + await encryptFile(keyId, filePath); + return; + } + + // Interactive single-value mode console.log(''); - console.log('🧙 Encrypting environment variables... ✨'); - // intro('🧙 Encrypting environment variables... 
✨'); const rawValue = await password({ message: 'Enter the value you want to encrypt' }); if (isCancel(rawValue)) return gracefulExit(); - const client = new VarlockNativeAppClient(); - await client.initializeSocket(); - const encryptedValue = await client.encrypt(rawValue); - - console.log('Copy this into your .env.local file and rename the key appropriately:\n'); - console.log(`SOME_SENSITIVE_KEY=varlock("${encryptedValue}")`); - - // const envGraph = await loadEnvGraph(); - // await envGraph.resolveEnvValues(); - // const resolvedEnv = envGraph.getResolvedEnvObject(); - - // TODO: need to reimplement using the new parser - - // const client = new VarlockNativeAppClient(); - // await client.initializeSocket(); - - // for (const envFile of loadedEnv.files) { - // let changeCount = 0; - // for (const itemKey in envFile.items) { - // const item = envFile.items[itemKey]; - // if (item.decorators?.sensitive) { - // if ('value' in item && item.value) { - // console.log('Encrypting', itemKey, envFile.path); - // const encryptedValue = await client.encrypt(item.value); - // delete item.value; - // (item as any).resolverName = 'varlock'; - // (item as any).resolverArgs = [encryptedValue]; - // changeCount++; - // } - // } else { - // if ('resolverName' in item && item.resolverName === 'varlock') { - // console.log('Decrypting', itemKey, envFile.path); - // const encryptedValue = item.resolverArgs[0]; - // if (typeof encryptedValue !== 'string') { - // throw new Error('Expected encrypted value to be a string'); - // } - // const decryptedValue = await client.decrypt(encryptedValue); - // (item as any).value = decryptedValue; - // delete (item as any).resolverName; - // delete (item as any).resolverArgs; - // changeCount++; - // } - // } - // } - - // const updatedEnvFileStr = dumpDotEnvContents(envFile.parsedContents); - // await fs.writeFile(envFile.path, updatedEnvFileStr); - - // log.success(`Updated ${changeCount} items in ${envFile.path}`); - // } - - // 
console.log(loadedEnv); - - // const unencryptedKeys: Array = []; - // parsedEnv.forEach((item) => { - // if (item.type !== 'item') return; - // if (item.key.startsWith('_VARLOCK_')) return; - // if (!('value' in item) || !item.value) return; - - // unencryptedKeys.push(item.key); - // }); - - // if (unencryptedKeys.length === 0) { - // console.log('No items to encrypt. Exiting...'); - // return; - // } - - // const selectedKeys = await multiselect({ - // message: 'Select env item(s) to encrypt 🔏', - // options: unencryptedKeys.map((key) => ({ - // value: key, - // label: key, - // })), - // initialValues: unencryptedKeys, - // required: false, - // }); - - // if (isCancel(selectedKeys) || !selectedKeys.length) { - // console.log('No items selected. Exiting...'); - // return; - // } - - // for (const item of parsedEnv) { - // if (item.type === 'item' && selectedKeys.includes(item.key)) { - // if (!('value' in item) || !item.value) throw new Error(`Item ${item.key} has no value`); - // const encryptedValue = await client.encrypt(item.value); - // delete item.value; - // (item as any).resolverName = 'varlock'; - // (item as any).resolverArgs = [encryptedValue]; - // } - // } - - // // write the updated env file - - // const updatedEnvFileStr = dumpDotEnvContents(parsedEnv); - // await fs.writeFile(envFilePath, updatedEnvFileStr); - - // outro(`Encrypted ${selectedKeys.length} items!`); + try { + const ciphertext = await localEncrypt.encryptValue(rawValue, keyId); + + console.log('\nCopy this into your .env.local file and rename the key appropriately:\n'); + console.log(`SOME_SENSITIVE_KEY=varlock("local:${ciphertext}")`); + } catch (err) { + if (err instanceof CliExitError) throw err; + throw new CliExitError( + `Encryption failed: ${err instanceof Error ? 
err.message : err}`, + ); + } }; - diff --git a/packages/varlock/src/cli/commands/lock.command.ts b/packages/varlock/src/cli/commands/lock.command.ts new file mode 100644 index 00000000..5128c7ac --- /dev/null +++ b/packages/varlock/src/cli/commands/lock.command.ts @@ -0,0 +1,26 @@ + +import { define } from 'gunshi'; + +import { type TypedGunshiCommandFn } from '../helpers/gunshi-type-utils'; +import * as localEncrypt from '../../lib/local-encrypt'; + +export const commandSpec = define({ + name: 'lock', + description: 'Lock the encryption daemon, requiring biometric for next decrypt', +}); + +export const commandFn: TypedGunshiCommandFn = async () => { + const backend = localEncrypt.getBackendInfo(); + + if (!backend.biometricAvailable) { + console.log(`The ${backend.type} backend does not support biometric lock.`); + return; + } + + try { + await localEncrypt.lockSession(); + console.log('Encryption session locked. Biometric authentication will be required for next decrypt.'); + } catch { + console.log('No encryption daemon is running — nothing to lock.'); + } +}; diff --git a/packages/varlock/src/env-graph/lib/env-graph.ts b/packages/varlock/src/env-graph/lib/env-graph.ts index 9c498fdc..28469081 100644 --- a/packages/varlock/src/env-graph/lib/env-graph.ts +++ b/packages/varlock/src/env-graph/lib/env-graph.ts @@ -4,6 +4,7 @@ import { ConfigItem } from './config-item'; import { EnvGraphDataSource, FileBasedDataSource, ImportAliasSource } from './data-source'; import { BaseResolvers, createResolver, type ResolverChildClass } from './resolver'; +import { VarlockResolver } from '../../lib/local-encrypt/builtin-resolver'; import { BaseDataTypes, type EnvGraphDataTypeFactory } from './data-types'; import { findGraphCycles, getTransitiveDeps, type GraphAdjacencyList } from './graph-utils'; import { ResolutionError, SchemaError } from './errors'; @@ -227,6 +228,8 @@ export class EnvGraph { for (const resolverClass of BaseResolvers) { this.registerResolver(resolverClass); 
} + // register built-in varlock() resolver for local encryption + this.registerResolver(VarlockResolver); // base root decorators (envFlag, generateTypes, import, etc) for (const rootDec of builtInRootDecorators) { this.registerRootDecorator(rootDec); diff --git a/packages/varlock/src/lib/local-encrypt/binary-resolver.ts b/packages/varlock/src/lib/local-encrypt/binary-resolver.ts new file mode 100644 index 00000000..08fed488 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/binary-resolver.ts @@ -0,0 +1,140 @@ +/** + * Resolves the path to the platform-specific native helper binary. + * + * Resolution order: + * 1. SEA sibling: same directory as the running varlock binary (install.sh, homebrew) + * 2. Bundled in npm package: native-bins/[-]/ within the varlock package + * 3. Dev fallback: walk up from __dirname to find build output + * + * Returns undefined if no binary is found (file-based fallback will be used instead). + */ + +import path from 'node:path'; +import fs from 'node:fs'; +import { fileURLToPath } from 'node:url'; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + +const BINARY_NAME = 'varlock-local-encrypt'; +const MACOS_APP_BUNDLE = 'VarlockEnclave.app'; + +/** Get the binary name for the current platform */ +function getPlatformBinaryName(): string { + return process.platform === 'win32' ? `${BINARY_NAME}.exe` : BINARY_NAME; +} + +/** Get the subdirectory name within native-bins/ for the current platform */ +function getNativeBinSubdir(): string { + if (process.platform === 'darwin') return 'darwin'; + if (process.platform === 'win32') return `win32-${process.arch}`; + return `${process.platform}-${process.arch}`; +} + +/** + * Resolve the macOS .app bundle binary path, or fall back to bare binary. 
+ */ +function resolveMacOSBinary(dir: string): string | undefined { + // Try .app bundle first (needed for custom Touch ID icon) + const appBundlePath = path.join(dir, MACOS_APP_BUNDLE, 'Contents', 'MacOS', BINARY_NAME); + if (fs.existsSync(appBundlePath)) return appBundlePath; + + // Fall back to bare binary + const barePath = path.join(dir, BINARY_NAME); + if (fs.existsSync(barePath)) return barePath; + + return undefined; +} + +/** + * Resolve the binary path for Linux/Windows. + */ +function resolveStandardBinary(dir: string): string | undefined { + const binaryPath = path.join(dir, getPlatformBinaryName()); + if (fs.existsSync(binaryPath)) return binaryPath; + return undefined; +} + +/** + * Resolve binary from a directory, handling macOS .app bundle vs standard binary. + */ +function resolveBinaryFromDir(dir: string): string | undefined { + if (process.platform === 'darwin') return resolveMacOSBinary(dir); + return resolveStandardBinary(dir); +} + +/** + * Strategy 1: Look for the binary next to the running varlock binary. + * This is the primary path for binary/SEA distribution (install.sh, homebrew). + */ +function resolveSeaSibling(): string | undefined { + const execDir = path.dirname(process.execPath); + return resolveBinaryFromDir(execDir); +} + +/** + * Strategy 2: Look for the binary bundled in the varlock npm package. 
+ * native-bins// + */ +function resolveNpmBundled(): string | undefined { + // __dirname points to the compiled dist/ or src/ directory within the varlock package + // native-bins/ is a sibling to dist/ and src/ + const nativeBinsDir = path.resolve(__dirname, '..', '..', '..', 'native-bins', getNativeBinSubdir()); + if (fs.existsSync(nativeBinsDir)) return resolveBinaryFromDir(nativeBinsDir); + + // Also check one level up (when running from dist/) + const altDir = path.resolve(__dirname, '..', 'native-bins', getNativeBinSubdir()); + if (fs.existsSync(altDir)) return resolveBinaryFromDir(altDir); + + return undefined; +} + +/** + * Strategy 3: Development fallback — look for build output in the monorepo. + * Walks up from __dirname looking for native binary build output + */ +function resolveDevFallback(): string | undefined { + let dir = __dirname; + for (let i = 0; i < 10; i++) { + const parent = path.dirname(dir); + if (parent === dir) break; + dir = parent; + + // Check for Swift build output (macOS) + if (process.platform === 'darwin') { + const swiftBuild = path.join(dir, 'packages', 'encryption-binary-swift', 'swift', '.build', 'release', 'VarlockEnclave'); + if (fs.existsSync(swiftBuild)) return swiftBuild; + } + + // Check for Rust build output (Linux/Windows) + const rustBuild = path.join(dir, 'packages', 'encryption-binary-rust', 'target', 'release', getPlatformBinaryName()); + if (fs.existsSync(rustBuild)) return rustBuild; + } + + return undefined; +} + +/** + * Ensure the binary at the given path is executable. + * GitHub Actions artifact upload/download strips execute permissions, + * and some extraction tools may do the same. + */ +function ensureExecutable(binaryPath: string): string { + try { + fs.accessSync(binaryPath, fs.constants.X_OK); + } catch { + // Not executable — try to fix it + if (process.platform !== 'win32') { + fs.chmodSync(binaryPath, 0o755); + } + } + return binaryPath; +} + +/** + * Resolve the native helper binary path. 
+ * Returns undefined if no binary is found — caller should fall back to pure JS. + */ +export function resolveNativeBinary(): string | undefined { + const resolved = resolveSeaSibling() ?? resolveNpmBundled() ?? resolveDevFallback(); + return resolved ? ensureExecutable(resolved) : undefined; +} diff --git a/packages/varlock/src/lib/local-encrypt/builtin-resolver.ts b/packages/varlock/src/lib/local-encrypt/builtin-resolver.ts new file mode 100644 index 00000000..9b7aebfb --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/builtin-resolver.ts @@ -0,0 +1,144 @@ +/** + * Built-in varlock() resolver function. + * + * Replaces the plugin-based resolver from @varlock/secure-enclave-plugin. + * Works cross-platform using the local-encrypt abstraction layer. + */ + +import fs from 'node:fs'; +import { createResolver, Resolver } from '../../env-graph/lib/resolver'; +import { ResolutionError, SchemaError } from '../../env-graph/lib/errors'; +import * as localEncrypt from './index'; + +const LOCAL_PREFIX = 'local:'; +const PLUGIN_ICON = 'mdi:fingerprint'; + +function escapeRegExp(str: string) { + return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +type VarlockResolverState = { + mode: 'decrypt'; + payload: string; +} | { + mode: 'prompt'; + itemKey: string; + sourceFilePath: string | undefined; +}; + +function writeBackEncryptedValue(itemKey: string, ciphertext: string, sourceFilePath: string | undefined) { + if (!sourceFilePath) return; + const currentContents = fs.readFileSync(sourceFilePath, 'utf-8'); + const pattern = new RegExp(`^(${escapeRegExp(itemKey)}\\s*=\\s*)varlock\\(prompt(?:=\\S*)?\\)`, 'm'); + const prefixedCiphertext = `${LOCAL_PREFIX}${ciphertext}`; + const updatedContents = currentContents.replace(pattern, `$1varlock("${prefixedCiphertext}")`); + if (updatedContents !== currentContents) { + fs.writeFileSync(sourceFilePath, updatedContents); + } +} + +export const VarlockResolver: typeof Resolver = createResolver({ + name: 'varlock', + label: 
'Decrypt locally encrypted value', + icon: PLUGIN_ICON, + argsSchema: { + type: 'mixed', + arrayMinLength: 0, + }, + process(): VarlockResolverState { + // Check for prompt mode: varlock(prompt=1) or varlock(prompt) + const promptArg = this.objArgs?.prompt; + if (promptArg) { + // Resolver doesn't expose parent item in its type, but it's available at runtime + const parent = (this as any).parent; + const itemKey = parent?.key || 'unknown'; + const dataSource = this.dataSource as any; + const sourceFilePath = dataSource?.fullPath as string | undefined; + return { mode: 'prompt', itemKey, sourceFilePath }; + } + + // Normal mode: varlock("encrypted-payload") + if (!this.arrArgs || this.arrArgs.length !== 1) { + throw new SchemaError('varlock() expects a single encrypted payload string, or prompt=1 to enter a new value'); + } + if (!this.arrArgs[0]?.isStatic) { + throw new SchemaError('varlock() expects a single static encrypted payload string'); + } + const payload = this.arrArgs[0].staticValue; + if (typeof payload !== 'string') { + throw new SchemaError('varlock() expects a string argument'); + } + return { mode: 'decrypt', payload }; + }, + async resolve(state: VarlockResolverState) { + // Ensure a key exists (first-time setup) + await localEncrypt.ensureKey(); + + if (state.mode === 'decrypt') { + let ciphertext = state.payload; + if (ciphertext.startsWith(LOCAL_PREFIX)) { + ciphertext = ciphertext.slice(LOCAL_PREFIX.length); + } + try { + return await localEncrypt.decryptValue(ciphertext); + } catch (err) { + const backend = localEncrypt.getBackendInfo(); + throw new ResolutionError( + `Decryption failed: ${err instanceof Error ? err.message : err}`, + { + tip: [ + `Backend: ${backend.type} (${backend.hardwareBacked ? 
'hardware-backed' : 'file-based'})`, + 'Make sure the encryption key has not been deleted.', + 'Run `varlock encrypt --help` for more info.', + ].join('\n'), + }, + ); + } + } + + // Prompt mode: prompt user for secret, encrypt it, write back to file + const { itemKey, sourceFilePath } = state; + const backend = localEncrypt.getBackendInfo(); + + // Use daemon's native dialog on macOS Secure Enclave + if (backend.type === 'secure-enclave' && backend.biometricAvailable) { + const { DaemonClient } = await import('./daemon-client'); + const client = new DaemonClient(); + const ciphertext = await client.promptSecret({ + itemKey, + message: `Enter the secret value for ${itemKey}:`, + }); + + if (!ciphertext) { + throw new ResolutionError('Secret input was cancelled', { + tip: 'Run varlock again and enter a value, or replace prompt=1 with an encrypted value', + }); + } + + writeBackEncryptedValue(itemKey, ciphertext, sourceFilePath); + return localEncrypt.decryptValue(ciphertext); + } + + // Terminal prompt for file-based backend + if (!process.stdout.isTTY || !process.stdin.isTTY) { + throw new ResolutionError( + `No encrypted value found for ${itemKey}`, + { + tip: `Run \`varlock encrypt --file ${sourceFilePath || ''}\` to encrypt this value interactively.`, + }, + ); + } + + const { password, isCancel } = await import('@clack/prompts'); + const rawValue = await password({ message: `Enter the secret value for ${itemKey}:` }); + if (isCancel(rawValue) || !rawValue) { + throw new ResolutionError('Secret input was cancelled', { + tip: 'Run varlock again and enter a value, or replace prompt=1 with an encrypted value', + }); + } + + const ciphertext = await localEncrypt.encryptValue(rawValue); + writeBackEncryptedValue(itemKey, ciphertext, sourceFilePath); + return rawValue; + }, +}); diff --git a/packages/varlock/src/lib/local-encrypt/crypto.test.ts b/packages/varlock/src/lib/local-encrypt/crypto.test.ts new file mode 100644 index 00000000..baeca237 --- /dev/null +++ 
b/packages/varlock/src/lib/local-encrypt/crypto.test.ts @@ -0,0 +1,95 @@ +import { describe, it, expect } from 'vitest'; +import { createKeyPair, encrypt, decrypt } from './crypto'; + +describe('ECIES crypto', () => { + it('round-trips encrypt → decrypt', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'hello world — this is a secret!'; + + const ciphertext = await encrypt(keyPair.publicKey, plaintext); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + + expect(decrypted).toBe(plaintext); + }); + + it('produces different ciphertext each time (random nonce)', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'same input'; + + const ct1 = await encrypt(keyPair.publicKey, plaintext); + const ct2 = await encrypt(keyPair.publicKey, plaintext); + + expect(ct1).not.toBe(ct2); + + // But both decrypt to the same value + expect(await decrypt(keyPair.privateKey, keyPair.publicKey, ct1)).toBe(plaintext); + expect(await decrypt(keyPair.privateKey, keyPair.publicKey, ct2)).toBe(plaintext); + }); + + it('fails with wrong private key', async () => { + const keyPair1 = await createKeyPair(); + const keyPair2 = await createKeyPair(); + const plaintext = 'secret'; + + const ciphertext = await encrypt(keyPair1.publicKey, plaintext); + + await expect(decrypt(keyPair2.privateKey, keyPair2.publicKey, ciphertext)).rejects.toThrow(); + }); + + it('fails with truncated payload', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, 'test'); + + // Truncate the base64 payload + const truncated = ciphertext.slice(0, 20); + await expect(decrypt(keyPair.privateKey, keyPair.publicKey, truncated)).rejects.toThrow('Payload too short'); + }); + + it('fails with wrong version byte', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, 'test'); + + // Decode, change version byte, re-encode + const buf 
= Buffer.from(ciphertext, 'base64'); + buf[0] = 0xFF; + const tampered = buf.toString('base64'); + + await expect(decrypt(keyPair.privateKey, keyPair.publicKey, tampered)).rejects.toThrow( + 'Unsupported payload version', + ); + }); + + it('handles empty string', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, ''); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + expect(decrypted).toBe(''); + }); + + it('handles unicode and emoji', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'こんにちは 🔐 résumé café'; + const ciphertext = await encrypt(keyPair.publicKey, plaintext); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + expect(decrypted).toBe(plaintext); + }); + + it('handles large payloads', async () => { + const keyPair = await createKeyPair(); + const plaintext = 'x'.repeat(100_000); + const ciphertext = await encrypt(keyPair.publicKey, plaintext); + const decrypted = await decrypt(keyPair.privateKey, keyPair.publicKey, ciphertext); + expect(decrypted).toBe(plaintext); + }); + + it('payload has correct structure', async () => { + const keyPair = await createKeyPair(); + const ciphertext = await encrypt(keyPair.publicKey, 'test'); + const payload = Buffer.from(ciphertext, 'base64'); + + // version(1) + ephemeralPubKey(65) + nonce(12) + ciphertext(4 for "test") + tag(16) = 98 + expect(payload[0]).toBe(0x01); // version + expect(payload[1]).toBe(0x04); // uncompressed point prefix + expect(payload.length).toBe(1 + 65 + 12 + 4 + 16); // 98 bytes + }); +}); diff --git a/packages/varlock/src/lib/local-encrypt/crypto.ts b/packages/varlock/src/lib/local-encrypt/crypto.ts new file mode 100644 index 00000000..ee51f8c1 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/crypto.ts @@ -0,0 +1,247 @@ +/** + * Pure JS ECIES implementation using Node.js Web Crypto API. 
+ * + * Wire-compatible with the Swift Secure Enclave implementation: + * - P-256 ECDH key agreement + * - HKDF-SHA256 (salt: "varlock-ecies-v1", info: ephemeralPub || recipientPub) + * - AES-256-GCM with random 12-byte nonce + * - Payload: version(1) | ephemeralPubKey(65) | nonce(12) | ciphertext(N) | tag(16) + * + * Adapted from PR #19's apple-crypto.ts, modified to match the custom ECIES scheme + * used by the Swift SecureEnclaveManager rather than Apple's built-in variant. + */ + +import { webcrypto } from 'node:crypto'; + +const subtle = webcrypto.subtle; + +const PAYLOAD_VERSION = 0x01; +const HKDF_SALT = new TextEncoder().encode('varlock-ecies-v1'); +const EC_ALGORITHM = { name: 'ECDH', namedCurve: 'P-256' }; + +/** Uncompressed P-256 public key is 65 bytes (0x04 || x(32) || y(32)) */ +const PUBLIC_KEY_LENGTH = 65; +const NONCE_LENGTH = 12; +const TAG_LENGTH = 16; +const HEADER_LENGTH = 1 + PUBLIC_KEY_LENGTH + NONCE_LENGTH; // version + pubkey + nonce + +// Bun's types are stricter about BufferSource (requires ArrayBuffer, not ArrayBufferLike). +// This type assertion is safe — we always work with standard ArrayBuffers. 
+ +const bs = (data: Uint8Array | ArrayBuffer) => data as any; + +// ── Key types ────────────────────────────────────────────────────────── + +export interface EcKeyPair { + /** Base64-encoded uncompressed P-256 public key (65 bytes raw) */ + publicKey: string; + /** Base64-encoded PKCS8 private key */ + privateKey: string; +} + +// ── Utilities ────────────────────────────────────────────────────────── + +function concatBuffers(...buffers: Array): Uint8Array { + const totalLength = buffers.reduce((sum, b) => sum + b.length, 0); + const result = new Uint8Array(totalLength); + let offset = 0; + for (const buf of buffers) { + result.set(buf, offset); + offset += buf.length; + } + return result; +} + +function bufferToBase64(buffer: ArrayBuffer | Uint8Array): string { + if (buffer instanceof Uint8Array) { + return Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength).toString('base64'); + } + return Buffer.from(buffer).toString('base64'); +} + +function base64ToUint8(base64: string): Uint8Array { + const buf = Buffer.from(base64, 'base64'); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength); +} + +// ── HKDF-SHA256 ──────────────────────────────────────────────────────── + +/** + * HKDF-SHA256 (RFC 5869) — matches the Swift SecureEnclaveManager.deriveKey implementation. + * + * We implement this manually rather than using Web Crypto's built-in HKDF because + * the Web Crypto HKDF requires importing the input key material as a CryptoKey, + * which adds complexity. This manual implementation is a direct port of the Swift code. + */ +async function hkdfSha256( + ikm: Uint8Array, + salt: Uint8Array, + info: Uint8Array, + outputByteCount: number, +): Promise { + // HKDF-Extract: PRK = HMAC-SHA256(salt, IKM) + const saltKey = await subtle.importKey('raw', bs(salt), { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']); + const prk = new Uint8Array(await subtle.sign('HMAC', saltKey, bs(ikm))); + + // HKDF-Expand: OKM = T(1) || T(2) || ... 
+ const prkKey = await subtle.importKey('raw', bs(prk), { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']); + const okm = new Uint8Array(outputByteCount); + let t = new Uint8Array(0); + let offset = 0; + let counter = 1; + + while (offset < outputByteCount) { + const input = concatBuffers(t, info, new Uint8Array([counter])); + t = new Uint8Array(await subtle.sign('HMAC', prkKey, bs(input))); + okm.set(t.slice(0, Math.min(t.length, outputByteCount - offset)), offset); + offset += t.length; + counter++; + } + + return okm; +} + +// ── Key management ───────────────────────────────────────────────────── + +/** Import a public key from its base64-encoded uncompressed representation. */ +async function importPublicKey(base64: string): Promise { + return subtle.importKey('raw', bs(base64ToUint8(base64)), EC_ALGORITHM, true, []); +} + +/** Import a private key from its base64-encoded PKCS8 representation. */ +async function importPrivateKey(base64: string): Promise { + return subtle.importKey('pkcs8', bs(base64ToUint8(base64)), EC_ALGORITHM, true, ['deriveBits']); +} + +/** Generate a new P-256 ECDH key pair. */ +export async function createKeyPair(): Promise { + const keyPair = await subtle.generateKey(EC_ALGORITHM, true, ['deriveBits']); + + const publicKeyRaw = await subtle.exportKey('raw', keyPair.publicKey); + const privateKeyPkcs8 = await subtle.exportKey('pkcs8', keyPair.privateKey); + + return { + publicKey: bufferToBase64(publicKeyRaw), + privateKey: bufferToBase64(privateKeyPkcs8), + }; +} + +// ── ECIES encrypt ────────────────────────────────────────────────────── + +/** + * Encrypt plaintext using ECIES with the recipient's public key. 
+ * + * @param publicKeyBase64 - Base64-encoded uncompressed P-256 public key (65 bytes raw) + * @param plaintext - UTF-8 string to encrypt + * @returns Base64-encoded ciphertext payload + */ +export async function encrypt(publicKeyBase64: string, plaintext: string): Promise { + const recipientPublicKey = await importPublicKey(publicKeyBase64); + const recipientPubKeyRaw = base64ToUint8(publicKeyBase64); + + // Generate ephemeral key pair + const ephemeralKeyPair = await subtle.generateKey(EC_ALGORITHM, true, ['deriveBits']); + const ephemeralPubKeyRaw = new Uint8Array(await subtle.exportKey('raw', ephemeralKeyPair.publicKey)); + + // ECDH: ephemeral private × recipient public → shared secret (32 bytes for P-256) + const sharedSecretBits = await subtle.deriveBits( + { name: 'ECDH', public: recipientPublicKey }, + ephemeralKeyPair.privateKey, + 256, + ); + const sharedSecret = new Uint8Array(sharedSecretBits); + + // HKDF-SHA256 → AES-256 key + const info = concatBuffers(ephemeralPubKeyRaw, recipientPubKeyRaw); + const aesKey = await hkdfSha256(sharedSecret, HKDF_SALT, info, 32); + + // AES-256-GCM encrypt + const nonce = webcrypto.getRandomValues(new Uint8Array(NONCE_LENGTH)); + const plaintextBytes = new TextEncoder().encode(plaintext); + + const cryptoKey = await subtle.importKey('raw', bs(aesKey), 'AES-GCM', false, ['encrypt']); + const encrypted = new Uint8Array( + await subtle.encrypt({ name: 'AES-GCM', iv: bs(nonce), tagLength: TAG_LENGTH * 8 }, cryptoKey, bs(plaintextBytes)), + ); + + // Web Crypto appends the tag to ciphertext — split them to match Swift format + const ciphertext = encrypted.slice(0, encrypted.length - TAG_LENGTH); + const tag = encrypted.slice(encrypted.length - TAG_LENGTH); + + // Assemble payload: version(1) | ephemeralPub(65) | nonce(12) | ciphertext(N) | tag(16) + const payload = concatBuffers( + new Uint8Array([PAYLOAD_VERSION]), + ephemeralPubKeyRaw, + nonce, + ciphertext, + tag, + ); + + return bufferToBase64(payload); +} + +// ── 
ECIES decrypt ────────────────────────────────────────────────────── + +/** + * Decrypt ciphertext using ECIES with the recipient's private key. + * + * @param privateKeyBase64 - Base64-encoded PKCS8 private key + * @param publicKeyBase64 - Base64-encoded uncompressed P-256 public key of the recipient + * @param ciphertextBase64 - Base64-encoded ciphertext payload + * @returns Decrypted UTF-8 string + */ +export async function decrypt( + privateKeyBase64: string, + publicKeyBase64: string, + ciphertextBase64: string, +): Promise { + const payloadBytes = base64ToUint8(ciphertextBase64); + + if (payloadBytes.byteLength < HEADER_LENGTH + TAG_LENGTH) { + throw new Error('Payload too short'); + } + + // Parse payload + const version = payloadBytes[0]; + if (version !== PAYLOAD_VERSION) { + throw new Error(`Unsupported payload version: ${version}`); + } + + const ephemeralPubKeyRaw = payloadBytes.slice(1, 1 + PUBLIC_KEY_LENGTH); + const nonce = payloadBytes.slice(1 + PUBLIC_KEY_LENGTH, HEADER_LENGTH); + const ciphertextAndTag = payloadBytes.slice(HEADER_LENGTH); + + if (ciphertextAndTag.length < TAG_LENGTH) { + throw new Error('Payload too short for tag'); + } + + // Import keys + const privateKey = await importPrivateKey(privateKeyBase64); + const ephemeralPublicKey = await subtle.importKey('raw', bs(ephemeralPubKeyRaw), EC_ALGORITHM, true, []); + + // Recipient public key bytes for HKDF info + const recipientPubKeyRaw = base64ToUint8(publicKeyBase64); + + // ECDH: recipient private × ephemeral public → shared secret + const sharedSecretBits = await subtle.deriveBits( + { name: 'ECDH', public: ephemeralPublicKey }, + privateKey, + 256, + ); + const sharedSecret = new Uint8Array(sharedSecretBits); + + // HKDF-SHA256 → AES-256 key (must match encrypt side) + const info = concatBuffers(ephemeralPubKeyRaw, recipientPubKeyRaw); + const aesKey = await hkdfSha256(sharedSecret, HKDF_SALT, info, 32); + + // AES-256-GCM decrypt + // Web Crypto expects ciphertext + tag 
concatenated + const cryptoKey = await subtle.importKey('raw', bs(aesKey), 'AES-GCM', false, ['decrypt']); + const decrypted = await subtle.decrypt( + { name: 'AES-GCM', iv: bs(nonce), tagLength: TAG_LENGTH * 8 }, + cryptoKey, + bs(ciphertextAndTag), // already ciphertext || tag + ); + + return new TextDecoder().decode(decrypted); +} diff --git a/packages/varlock/src/lib/local-encrypt/daemon-client.ts b/packages/varlock/src/lib/local-encrypt/daemon-client.ts new file mode 100644 index 00000000..78de9335 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/daemon-client.ts @@ -0,0 +1,337 @@ +/** + * Daemon client for communicating with the native encryption helper binary. + * + * Handles daemon lifecycle (spawn, connect, reconnect) and IPC messaging + * using the 4-byte LE length-prefixed JSON protocol. + * + * - macOS/Linux: Unix domain socket + * - Windows: named pipe (TODO) + * + * Generalized from the secure-enclave plugin's EnclaveDaemonClient. + */ + +import net from 'node:net'; +import path from 'node:path'; +import fs from 'node:fs'; +import crypto from 'node:crypto'; +import { spawn } from 'node:child_process'; + +import { getUserVarlockDir } from '../user-config-dir'; +import { resolveNativeBinary } from './binary-resolver'; + +function getSocketDir(): string { + return path.join(getUserVarlockDir(), 'local-encrypt'); +} + +function getSocketPath(): string { + if (process.platform === 'win32') { + // Windows named pipe — fixed name shared by all varlock processes + return '\\\\.\\pipe\\varlock-local-encrypt'; + } + return path.join(getSocketDir(), 'daemon.sock'); +} + +function getPidPath(): string { + return path.join(getSocketDir(), 'daemon.pid'); +} + +export class DaemonClient { + private socket: net.Socket | null = null; + private messageQueue = new Map void; + reject: (error: Error) => void; + }>(); + private isConnected = false; + private buffer = Buffer.alloc(0); + private connectingPromise: Promise | null = null; + + async ensureConnected(): 
Promise { + if (this.isConnected && this.socket) return; + + // Deduplicate concurrent ensureConnected calls — multiple varlock() items + // may resolve concurrently and all call decrypt → ensureConnected + if (this.connectingPromise) return this.connectingPromise; + + this.connectingPromise = this.doConnect(); + try { + await this.connectingPromise; + } finally { + this.connectingPromise = null; + } + } + + /** + * Try to connect to an existing daemon without spawning a new one. + * Returns true if connected, false if no daemon is running. + */ + async tryConnect(): Promise { + if (this.isConnected && this.socket) return true; + const socketPath = getSocketPath(); + try { + await this.connectToSocket(socketPath); + return true; + } catch { + return false; + } + } + + private async doConnect(): Promise { + const socketPath = getSocketPath(); + try { + await this.connectToSocket(socketPath); + return; + } catch { + // Daemon not running, spawn it + } + + await this.spawnDaemon(); + await this.connectToSocket(socketPath); + } + + async decrypt(ciphertext: string, keyId = 'varlock-default'): Promise { + await this.ensureConnected(); + const result = await this.sendMessage({ + action: 'decrypt', + payload: { ciphertext, keyId }, + }); + if (typeof result === 'string') return result; + if (result && typeof result === 'object' && 'error' in result) { + throw new Error(String(result.error)); + } + return String(result); + } + + async promptSecret(opts?: { + itemKey?: string; + message?: string; + keyId?: string; + }): Promise { + await this.ensureConnected(); + try { + const result = await this.sendMessage({ + action: 'prompt-secret', + payload: { + itemKey: opts?.itemKey, + message: opts?.message, + keyId: opts?.keyId, + }, + }); + if (result && typeof result === 'object' && 'ciphertext' in result) { + return result.ciphertext as string; + } + return undefined; + } catch (err) { + if (err instanceof Error && err.message === 'cancelled') return undefined; + throw err; + } 
+ } + + async invalidateSession(): Promise { + await this.ensureConnected(); + await this.sendMessage({ action: 'invalidate-session' }); + } + + cleanup(): void { + for (const { reject } of this.messageQueue.values()) { + reject(new Error('Connection closed')); + } + this.messageQueue.clear(); + this.socket?.end(); + this.socket = null; + this.isConnected = false; + this.buffer = Buffer.alloc(0); + } + + // -- Private -- + + private connectToSocket(socketPath: string): Promise { + return new Promise((resolve, reject) => { + const socket = new net.Socket(); + const timeout = setTimeout(() => { + socket.destroy(); + reject(new Error('Connection timeout')); + }, 5000); + + socket.on('connect', () => { + clearTimeout(timeout); + this.socket = socket; + this.isConnected = true; + this.buffer = Buffer.alloc(0); + resolve(); + }); + + socket.on('data', (data: Buffer) => { + this.handleData(data); + }); + + socket.on('error', (err) => { + clearTimeout(timeout); + this.isConnected = false; + reject(err); + }); + + socket.on('close', () => { + this.isConnected = false; + this.socket = null; + }); + + socket.connect(socketPath); + }); + } + + private handleData(data: Buffer): void { + this.buffer = Buffer.concat([this.buffer, data]); + + while (this.buffer.length >= 4) { + const messageLength = this.buffer.readUInt32LE(0); + if (this.buffer.length < 4 + messageLength) break; + + const messageData = this.buffer.subarray(4, 4 + messageLength); + this.buffer = this.buffer.subarray(4 + messageLength); + + try { + const message = JSON.parse(messageData.toString()); + if (message.id && this.messageQueue.has(message.id)) { + const { resolve: res, reject: rej } = this.messageQueue.get(message.id)!; + this.messageQueue.delete(message.id); + if (message.error) { + rej(new Error(message.error)); + } else { + res(message.result); + } + } + } catch { + // Ignore malformed messages + } + } + } + + private sendMessage(message: Record): Promise { + return new Promise((resolve, reject) => { + 
if (!this.isConnected || !this.socket) { + reject(new Error('Not connected to daemon')); + return; + } + + const messageId = `${Date.now().toString(36)}-${crypto.randomBytes(4).toString('hex')}`; + const messageWithId = { ...message, id: messageId }; + const jsonData = JSON.stringify(messageWithId); + const messageBytes = Buffer.from(jsonData, 'utf-8'); + + const lengthBuf = Buffer.alloc(4); + lengthBuf.writeUInt32LE(messageBytes.length, 0); + + this.messageQueue.set(messageId, { resolve, reject }); + this.socket.write(Buffer.concat([lengthBuf, messageBytes])); + }); + } + + private async spawnDaemon(): Promise { + const binaryPath = resolveNativeBinary(); + if (!binaryPath) { + throw new Error('Native encryption binary not found — cannot start daemon'); + } + + const socketPath = getSocketPath(); + const pidPath = getPidPath(); + const isWindows = process.platform === 'win32'; + + // Ensure PID directory exists (don't mkdir for Windows pipe paths) + if (!isWindows) { + fs.mkdirSync(path.dirname(socketPath), { recursive: true }); + } + fs.mkdirSync(path.dirname(pidPath), { recursive: true }); + + // Check for existing daemon via PID + if (fs.existsSync(pidPath)) { + try { + const pid = parseInt(fs.readFileSync(pidPath, 'utf-8').trim(), 10); + process.kill(pid, 0); // Throws if process doesn't exist + // Process is alive — wait briefly and let ensureConnected retry + await new Promise((r) => { + setTimeout(r, 500); + }); + return; + } catch { + // Stale PID file — clean up both PID and socket + } + } + + // Clean up stale files before spawning + // On Windows, named pipes don't leave files — only clean PID and Unix sockets + if (!isWindows) { + for (const file of [socketPath, pidPath]) { + if (fs.existsSync(file)) { + fs.unlinkSync(file); + } + } + // Verify socket file is actually gone + if (fs.existsSync(socketPath)) { + throw new Error(`Failed to clean up stale socket file: ${socketPath}`); + } + } else { + // Clean PID file only on Windows + if 
(fs.existsSync(pidPath)) { + fs.unlinkSync(pidPath); + } + } + + return new Promise((resolve, reject) => { + const child = spawn(binaryPath, [ + 'daemon', + '--socket-path', + socketPath, + '--pid-path', + pidPath, + ], { + detached: true, + stdio: ['ignore', 'pipe', 'pipe'], + }); + + const timeout = setTimeout(() => { + reject(new Error('Daemon failed to start within timeout')); + }, 10000); + + let stdoutData = ''; + let stderrData = ''; + + child.stdout!.on('data', (data: Buffer) => { + stdoutData += data.toString(); + try { + const parsed = JSON.parse(stdoutData); + if (parsed.ready) { + clearTimeout(timeout); + child.unref(); + child.stdout!.destroy(); + child.stderr!.destroy(); + resolve(); + } + } catch { + // Incomplete JSON, keep buffering + } + }); + + child.stderr!.on('data', (data: Buffer) => { + stderrData += data.toString(); + }); + + child.on('error', (err) => { + clearTimeout(timeout); + reject(new Error(`Failed to spawn daemon: ${err.message}`)); + }); + + child.on('exit', (code) => { + clearTimeout(timeout); + if (code !== 0) { + const details = [ + stderrData.trim() && `stderr: ${stderrData.trim()}`, + stdoutData.trim() && `stdout: ${stdoutData.trim()}`, + `binary: ${binaryPath}`, + `socket: ${socketPath}`, + ].filter(Boolean).join('\n'); + reject(new Error(`Daemon exited with code ${code}\n${details}`)); + } + }); + }); + } +} diff --git a/packages/varlock/src/lib/local-encrypt/file-backend.test.ts b/packages/varlock/src/lib/local-encrypt/file-backend.test.ts new file mode 100644 index 00000000..afdcbf06 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/file-backend.test.ts @@ -0,0 +1,81 @@ +import { + describe, it, expect, beforeEach, afterEach, vi, +} from 'vitest'; +import fs from 'node:fs'; +import path from 'node:path'; +import os from 'node:os'; +import { + keyExists, generateKey, deleteKey, listKeys, encryptValue, decryptValue, +} from './file-backend'; + +// Use a temp directory for all key operations during tests +const 
testDir = path.join(os.tmpdir(), `varlock-test-${process.pid}`); + +vi.mock('../user-config-dir', () => ({ + getUserVarlockDir: () => testDir, +})); + +beforeEach(() => { + fs.mkdirSync(testDir, { recursive: true }); +}); + +afterEach(() => { + fs.rmSync(testDir, { recursive: true, force: true }); +}); + +describe('file-backend', () => { + it('generates and checks key existence', async () => { + expect(keyExists('test-key')).toBe(false); + const result = await generateKey('test-key'); + expect(result.keyId).toBe('test-key'); + expect(result.publicKey).toBeTruthy(); + expect(keyExists('test-key')).toBe(true); + }); + + it('uses default key id', async () => { + await generateKey(); + expect(keyExists()).toBe(true); + expect(keyExists('varlock-default')).toBe(true); + }); + + it('lists keys', async () => { + expect(listKeys()).toEqual([]); + await generateKey('key-a'); + await generateKey('key-b'); + const keys = listKeys(); + expect(keys).toContain('key-a'); + expect(keys).toContain('key-b'); + expect(keys).toHaveLength(2); + }); + + it('deletes keys', async () => { + await generateKey('to-delete'); + expect(keyExists('to-delete')).toBe(true); + expect(deleteKey('to-delete')).toBe(true); + expect(keyExists('to-delete')).toBe(false); + expect(deleteKey('nonexistent')).toBe(false); + }); + + it('round-trips encrypt → decrypt', async () => { + await generateKey('round-trip'); + const plaintext = 'super secret value!'; + const ciphertext = await encryptValue(plaintext, 'round-trip'); + const decrypted = await decryptValue(ciphertext, 'round-trip'); + expect(decrypted).toBe(plaintext); + }); + + it('fails to encrypt with nonexistent key', async () => { + await expect(encryptValue('test', 'nonexistent')).rejects.toThrow('Key not found'); + }); + + it('fails to decrypt with nonexistent key', async () => { + await expect(decryptValue('dGVzdA==', 'nonexistent')).rejects.toThrow('Key not found'); + }); + + it('fails to decrypt with wrong key', async () => { + await 
generateKey('key-1'); + await generateKey('key-2'); + const ciphertext = await encryptValue('secret', 'key-1'); + await expect(decryptValue(ciphertext, 'key-2')).rejects.toThrow(); + }); +}); diff --git a/packages/varlock/src/lib/local-encrypt/file-backend.ts b/packages/varlock/src/lib/local-encrypt/file-backend.ts new file mode 100644 index 00000000..e8107c3b --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/file-backend.ts @@ -0,0 +1,103 @@ +/** + * File-based local encryption backend. + * + * Stores P-256 ECDH key pairs as JSON files on disk with restricted permissions. + * Uses the pure JS ECIES implementation for all crypto operations. + * Works on all platforms — no native binary required. + */ + +import fs from 'node:fs'; +import path from 'node:path'; +import { getUserVarlockDir } from '../user-config-dir'; +import { createKeyPair, encrypt, decrypt } from './crypto'; + +const KEY_STORE_SUBDIR = 'local-encrypt/keys'; +const DEFAULT_KEY_ID = 'varlock-default'; + +interface StoredKeyPair { + keyId: string; + publicKey: string; + privateKey: string; + createdAt: string; +} + +function getKeyStorePath(): string { + return path.join(getUserVarlockDir(), KEY_STORE_SUBDIR); +} + +function getKeyFilePath(keyId: string): string { + return path.join(getKeyStorePath(), `${keyId}.json`); +} + +// ── Key management ───────────────────────────────────────────────────── + +export function keyExists(keyId: string = DEFAULT_KEY_ID): boolean { + return fs.existsSync(getKeyFilePath(keyId)); +} + +export async function generateKey(keyId: string = DEFAULT_KEY_ID): Promise<{ keyId: string; publicKey: string }> { + const keyPair = await createKeyPair(); + + const stored: StoredKeyPair = { + keyId, + publicKey: keyPair.publicKey, + privateKey: keyPair.privateKey, + createdAt: new Date().toISOString(), + }; + + const keyStorePath = getKeyStorePath(); + fs.mkdirSync(keyStorePath, { recursive: true }); + + const filePath = getKeyFilePath(keyId); + fs.writeFileSync(filePath, 
JSON.stringify(stored, null, 2), { mode: 0o600 }); + + return { keyId, publicKey: keyPair.publicKey }; +} + +export function deleteKey(keyId: string = DEFAULT_KEY_ID): boolean { + const filePath = getKeyFilePath(keyId); + try { + fs.unlinkSync(filePath); + return true; + } catch { + return false; + } +} + +export function listKeys(): Array { + const keyStorePath = getKeyStorePath(); + try { + return fs.readdirSync(keyStorePath) + .filter((f) => f.endsWith('.json')) + .map((f) => f.slice(0, -5)); + } catch { + return []; + } +} + +// ── Internal key loading ─────────────────────────────────────────────── + +function loadKeyPair(keyId: string): StoredKeyPair { + const filePath = getKeyFilePath(keyId); + if (!fs.existsSync(filePath)) { + throw new Error(`Key not found: ${keyId}`); + } + const data = fs.readFileSync(filePath, 'utf-8'); + return JSON.parse(data) as StoredKeyPair; +} + +function getPublicKey(keyId: string): string { + return loadKeyPair(keyId).publicKey; +} + +// ── Encrypt / Decrypt ────────────────────────────────────────────────── + +export async function encryptValue(plaintext: string, keyId: string = DEFAULT_KEY_ID): Promise { + const publicKey = getPublicKey(keyId); + return encrypt(publicKey, plaintext); +} + +export async function decryptValue(ciphertext: string, keyId: string = DEFAULT_KEY_ID): Promise { + const stored = loadKeyPair(keyId); + return decrypt(stored.privateKey, stored.publicKey, ciphertext); +} diff --git a/packages/varlock/src/lib/local-encrypt/index.ts b/packages/varlock/src/lib/local-encrypt/index.ts new file mode 100644 index 00000000..ecdda221 --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/index.ts @@ -0,0 +1,195 @@ +/** + * Cross-platform local encryption for varlock. + * + * Provides a unified API for encrypting/decrypting secrets using the best + * available backend on the current platform: + * + * 1. macOS Secure Enclave (Swift binary) — hardware-backed, Touch ID + * 2. 
Windows TPM/Hello (Rust binary) — hardware-backed, Windows Hello (TODO) + * 3. Linux TPM2 (Rust binary) — hardware-backed (TODO) + * 4. File-based (pure JS) — universal fallback, no native binary needed + */ + +import { execFileSync } from 'node:child_process'; +import { resolveNativeBinary } from './binary-resolver'; +import { DaemonClient } from './daemon-client'; +import * as fileBackend from './file-backend'; +import type { BackendInfo, BackendType, NativeStatusResult } from './types'; + +export type { BackendInfo, BackendType } from './types'; + +const DEFAULT_KEY_ID = 'varlock-default'; + +// ── Native binary one-shot commands ──────────────────────────────────── + +function runNativeBinary(args: Array): string { + const binaryPath = resolveNativeBinary(); + if (!binaryPath) { + throw new Error('Native binary not found'); + } + return execFileSync(binaryPath, args, { + encoding: 'utf-8', + timeout: 30_000, + }).trim(); +} + +function runNativeBinaryJson>(args: Array): T { + const output = runNativeBinary(args); + const parsed = JSON.parse(output); + if (parsed.error) { + throw new Error(parsed.error); + } + return parsed as T; +} + +// ── Backend detection ────────────────────────────────────────────────── + +let cachedBackendInfo: BackendInfo | undefined; + +function detectBackendType(): BackendType { + const binaryPath = resolveNativeBinary(); + if (!binaryPath) return 'file'; + + switch (process.platform) { + case 'darwin': return 'secure-enclave'; + case 'win32': return 'windows-tpm'; + case 'linux': return 'linux-tpm'; + default: return 'file'; + } +} + +/** Get information about the active encryption backend. */ +export function getBackendInfo(): BackendInfo { + if (cachedBackendInfo) return cachedBackendInfo; + + const type = detectBackendType(); + const binaryPath = type !== 'file' ? 
resolveNativeBinary() : undefined; + + if (type !== 'file' && binaryPath) { + // Query the native binary for its actual capabilities + try { + const status = runNativeBinaryJson(['status']); + cachedBackendInfo = { + type, + platform: process.platform, + hardwareBacked: status.hardwareBacked, + biometricAvailable: status.biometricAvailable, + binaryPath, + }; + } catch { + // Binary failed — fall back to reasonable defaults + cachedBackendInfo = { + type, + platform: process.platform, + hardwareBacked: type === 'secure-enclave', + biometricAvailable: type === 'secure-enclave', + binaryPath, + }; + } + } else { + cachedBackendInfo = { + type, + platform: process.platform, + hardwareBacked: false, + biometricAvailable: false, + binaryPath: undefined, + }; + } + + return cachedBackendInfo; +} + +// ── Daemon client (singleton for biometric-enabled backends) ─────────── + +let daemonClient: DaemonClient | undefined; + +function getDaemonClient(): DaemonClient { + daemonClient ||= new DaemonClient(); + return daemonClient; +} + +// ── Key management ───────────────────────────────────────────────────── + +/** Check if a key exists. */ +export function keyExists(keyId: string = DEFAULT_KEY_ID): boolean { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.keyExists(keyId); + } + const result = runNativeBinaryJson<{ exists: boolean }>(['key-exists', '--key-id', keyId]); + return result.exists; +} + +/** Generate a new encryption key. */ +export async function generateKey(keyId: string = DEFAULT_KEY_ID): Promise<{ keyId: string; publicKey: string }> { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.generateKey(keyId); + } + return runNativeBinaryJson<{ keyId: string; publicKey: string }>(['generate-key', '--key-id', keyId]); +} + +/** Ensure a key exists, generating one if necessary. 
*/ +export async function ensureKey(keyId: string = DEFAULT_KEY_ID): Promise { + if (!keyExists(keyId)) { + await generateKey(keyId); + } +} + +// ── Encrypt / Decrypt ────────────────────────────────────────────────── + +/** + * Encrypt a plaintext value. + * + * For hardware-backed backends, encryption uses the public key only (no biometric needed). + * For file-based backend, uses the pure JS ECIES implementation. + */ +export async function encryptValue(plaintext: string, keyId: string = DEFAULT_KEY_ID): Promise { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.encryptValue(plaintext, keyId); + } + // Native binary encrypt (one-shot, no biometric needed for encrypt) + const b64Input = Buffer.from(plaintext, 'utf-8').toString('base64'); + const result = runNativeBinaryJson<{ ciphertext: string }>(['encrypt', '--key-id', keyId, '--data', b64Input]); + return result.ciphertext; +} + +/** + * Decrypt a ciphertext value. + * + * For biometric-enabled backends (macOS Secure Enclave, Windows Hello), + * uses the daemon client for session caching (avoids repeated biometric prompts). + * For file-based backend, uses the pure JS ECIES implementation. + */ +export async function decryptValue(ciphertext: string, keyId: string = DEFAULT_KEY_ID): Promise { + const backend = getBackendInfo(); + if (backend.type === 'file') { + return fileBackend.decryptValue(ciphertext, keyId); + } + + // Use daemon client for biometric backends (session caching) + if (backend.biometricAvailable) { + const client = getDaemonClient(); + return client.decrypt(ciphertext, keyId); + } + + // Non-biometric native backend (e.g., Linux TPM without polkit) — one-shot + const result = runNativeBinaryJson<{ plaintext: string }>(['decrypt', '--key-id', keyId, '--data', ciphertext]); + return result.plaintext; +} + +/** + * Invalidate the biometric session, requiring re-authentication for next decrypt. 
+ * Connects to the running daemon without spawning one (varlock lock runs in a separate process). + */ +export async function lockSession(): Promise { + const backend = getBackendInfo(); + if (!backend.biometricAvailable) return; + const client = getDaemonClient(); + const connected = await client.tryConnect(); + if (!connected) { + throw new Error('No encryption daemon is running'); + } + await client.invalidateSession(); +} diff --git a/packages/varlock/src/lib/local-encrypt/types.ts b/packages/varlock/src/lib/local-encrypt/types.ts new file mode 100644 index 00000000..44def1ff --- /dev/null +++ b/packages/varlock/src/lib/local-encrypt/types.ts @@ -0,0 +1,42 @@ +/** + * Shared types for the local encryption system. + */ + +/** Which encryption backend is active */ +export type BackendType = ( + | 'secure-enclave' // macOS Secure Enclave (Swift binary) + | 'windows-tpm' // Windows native (Rust binary) — DPAPI now, TPM/Hello planned + | 'linux-tpm' // Linux native (Rust binary) — kernel keyring now, TPM planned + | 'file' // Pure JS file-based (universal fallback) +); + +/** Information about the active encryption backend */ +export interface BackendInfo { + type: BackendType; + platform: NodeJS.Platform; + hardwareBacked: boolean; + biometricAvailable: boolean; + binaryPath?: string; +} + +/** IPC daemon message format (length-prefixed JSON over Unix socket or named pipe) */ +export interface DaemonMessage { + id: string; + action: 'decrypt' | 'encrypt' | 'prompt-secret' | 'ping' | 'invalidate-session'; + payload?: Record; +} + +/** IPC daemon response format */ +export interface DaemonResponse { + id: string; + result?: unknown; + error?: string; +} + +/** Result from the status command of a native binary */ +export interface NativeStatusResult { + backend: string; + hardwareBacked: boolean; + biometricAvailable: boolean; + keys: Array; +} diff --git a/scripts/check-release-packages.ts b/scripts/check-release-packages.ts new file mode 100644 index 
00000000..30a4d420 --- /dev/null +++ b/scripts/check-release-packages.ts @@ -0,0 +1,65 @@ +/** + * Determines which packages would be published in a preview release. + * Outputs a JSON array of package paths and a flag for whether varlock is included. + * + * Usage: + * bun run scripts/check-release-packages.ts + * + * Outputs (via GITHUB_OUTPUT if available): + * packages=["path1","path2"] + * includes-varlock=true|false + */ +import { execSync } from 'node:child_process'; +import fs from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { listWorkspaces } from './list-workspaces'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const MONOREPO_ROOT = path.resolve(__dirname, '..'); + +const workspacePackagesInfo = await listWorkspaces(MONOREPO_ROOT); + +const currentBranch = process.env.GITHUB_HEAD_REF || execSync('git branch --show-current').toString().trim(); +let releasePackagePaths: Array; + +if (currentBranch === 'changeset-release/main') { + const gitDiff = execSync('git diff origin/main --name-only').toString(); + const modifiedPackageJsons = gitDiff + .split('\n') + .filter((filePath) => filePath !== 'package.json') + .filter((filePath) => filePath.endsWith('package.json')); + + releasePackagePaths = modifiedPackageJsons + .map((filePath) => `${MONOREPO_ROOT}/${filePath.replace('/package.json', '')}`) + .filter((filePath) => workspacePackagesInfo.some((p) => p.path === filePath)); +} else { + execSync('bunx changeset status --output=changesets-summary.json', { cwd: MONOREPO_ROOT }); + + const changeSetsSummaryRaw = fs.readFileSync(path.join(MONOREPO_ROOT, 'changesets-summary.json'), 'utf8'); + const changeSetsSummary = JSON.parse(changeSetsSummaryRaw); + + releasePackagePaths = changeSetsSummary.releases + .filter((r: any) => r.newVersion !== r.oldVersion) + .map((r: any) => workspacePackagesInfo.find((p) => p.name === r.name)) + .filter(Boolean) + .map((p: any) 
=> p.path); + + fs.unlinkSync(path.join(MONOREPO_ROOT, 'changesets-summary.json')); +} + +// filter out vscode extension which is not released via npm +releasePackagePaths = releasePackagePaths.filter((p: string) => !p.endsWith('packages/vscode-plugin')); + +const includesVarlock = releasePackagePaths.some((p) => p.endsWith('packages/varlock')); + +console.log('Packages to release:', releasePackagePaths); +console.log('Includes varlock:', includesVarlock); + +// Write to GITHUB_OUTPUT if running in CI +const githubOutput = process.env.GITHUB_OUTPUT; +if (githubOutput) { + fs.appendFileSync(githubOutput, `packages=${JSON.stringify(releasePackagePaths)}\n`); + fs.appendFileSync(githubOutput, `includes-varlock=${includesVarlock}\n`); +} diff --git a/scripts/release-preview.ts b/scripts/release-preview.ts index 2f6e45e6..85893882 100644 --- a/scripts/release-preview.ts +++ b/scripts/release-preview.ts @@ -1,63 +1,26 @@ import { execSync, execFileSync } from 'node:child_process'; -import fs from 'node:fs'; import path from 'node:path'; import { fileURLToPath } from 'node:url'; -import { listWorkspaces } from './list-workspaces'; const __filename = fileURLToPath(import.meta.url); const MONOREPO_ROOT = path.resolve(path.dirname(__filename), '..'); -let err: unknown; -try { - const workspacePackagesInfo = await listWorkspaces(MONOREPO_ROOT); - - // Check if we're on changeset-release/main branch - const currentBranch = process.env.GITHUB_HEAD_REF || execSync('git branch --show-current').toString().trim(); - let releasePackagePaths: Array; - - console.log('current branch = ', currentBranch); - - if (currentBranch === 'changeset-release/main') { - // On changeset-release/main branch, find modified package.json files - console.log('Running on changeset-release/main branch, finding modified package.json files...'); - const gitDiff = execSync('git diff origin/main --name-only').toString(); - const modifiedPackageJsons = gitDiff - .split('\n') - .filter((filePath) => filePath 
!== 'package.json') // skip root package.json - .filter((filePath) => filePath.endsWith('package.json')); - - if (!modifiedPackageJsons.length) { - console.log('No modified package.json files found!'); - process.exit(0); - } - - // Get the workspace paths for modified packages - releasePackagePaths = modifiedPackageJsons - .map((filePath) => `${MONOREPO_ROOT}/${filePath.replace('/package.json', '')}`) - .filter((filePath) => workspacePackagesInfo.some((p) => p.path === filePath)); - } else { - console.log('Running on normal PR, using changesets to determine packages to release...'); - // Regular changeset-based logic - // generate summary of changed (publishable) modules according to changesets - execSync('bunx changeset status --output=changesets-summary.json'); - - const changeSetsSummaryRaw = fs.readFileSync('./changesets-summary.json', 'utf8'); - const changeSetsSummary = JSON.parse(changeSetsSummaryRaw); - - releasePackagePaths = changeSetsSummary.releases - .filter((r: any) => r.newVersion !== r.oldVersion) - .map((r: any) => workspacePackagesInfo.find((p) => p.name === r.name)) - .map((p: any) => p.path); - } +// Accept package paths from RELEASE_PACKAGES env var (set by check-release-packages step) +const releasePackagesEnv = process.env.RELEASE_PACKAGES; +if (!releasePackagesEnv) { + console.error('RELEASE_PACKAGES env var not set — run check-release-packages.ts first'); + process.exit(1); +} - // filter out vscode extension which is not released via npm - releasePackagePaths = releasePackagePaths.filter((p: string) => !p.endsWith('packages/vscode-plugin')); +const releasePackagePaths: Array = JSON.parse(releasePackagesEnv); - if (!releasePackagePaths.length) { - console.log('No packages to release!'); - process.exit(0); - } +if (!releasePackagePaths.length) { + console.log('No packages to release!'); + process.exit(0); +} +let err: unknown; +try { console.log('Updated packages to release:', releasePackagePaths); // Resolve workspace: and catalog: 
protocols in package.json files before publishing @@ -73,9 +36,4 @@ try { console.error('preview release failed'); console.error(_err); } - -// Only clean up changesets-summary.json if it exists (only created in changeset case) -if (fs.existsSync('./changesets-summary.json')) { - fs.unlinkSync('./changesets-summary.json'); -} process.exit(err ? 1 : 0); diff --git a/scripts/update-homebrew-formula.ts b/scripts/update-homebrew-formula.ts index 3a1c5f77..59d21f4f 100644 --- a/scripts/update-homebrew-formula.ts +++ b/scripts/update-homebrew-formula.ts @@ -5,7 +5,7 @@ const VERSION = process.env.RELEASE_VERSION; // get checksums file from dist-sea since we are running this script just after building the binaries const checksumsStr = await fs.readFile(path.join(import.meta.dirname, '../packages/varlock/dist-sea/checksums.txt'), 'utf-8'); -const checksums = {}; +const checksums: Record = {}; checksumsStr.split('\n').forEach((line) => { if (!line.trim()) return; // skip trailing blank line const [sha256, fileName] = line.split(' '); diff --git a/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js b/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js index 59a9d455..e491efee 100644 --- a/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js +++ b/smoke-tests/smoke-test-plugin/plugins/legacy-global-plugin.js @@ -1,5 +1,7 @@ // Legacy plugin that uses the old implicit `plugin` global (no require). // Used to verify the migration error message is shown. 
+ +// eslint-disable-next-line no-undef plugin.registerResolverFunction({ name: 'legacyTest', argsSchema: { diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..57da96b9 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "@varlock/tsconfig/base.tsconfig.json", + "compilerOptions": { + "noEmit": true, + "types": ["node"], + // bun supports .ts imports natively, but tsc needs this + "allowImportingTsExtensions": true + }, + "include": ["scripts/**/*.ts"] +} diff --git a/turbo.json b/turbo.json index 93c9000e..5c5d5bdf 100644 --- a/turbo.json +++ b/turbo.json @@ -30,6 +30,9 @@ "dependsOn": ["^typecheck"], "inputs": ["package.json", "tsconfig.json", "tsconfig.*.json", "src/**"] }, + "//#typecheck": { + "inputs": ["tsconfig.json", "scripts/**/*.ts"] + }, "dev": { "cache": false, "persistent": true