diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7af4740092..1923a3752a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,6 +10,8 @@ on: description: 'Optional test filter (e.g., "workspace", "tests/file.test.ts", or "-t pattern")' required: false type: string + # This filter is passed to unit tests, integration tests, e2e tests, and storybook tests + # to enable faster iteration when debugging specific test failures in CI jobs: static-check: @@ -85,7 +87,7 @@ jobs: integration-test: name: Integration Tests - runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }} + runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-24.04-32' || 'ubuntu-latest' }} steps: - name: Checkout code uses: actions/checkout@v4 @@ -95,7 +97,7 @@ jobs: - uses: ./.github/actions/setup-cmux - name: Run integration tests with coverage - run: TEST_INTEGRATION=1 bun x jest --coverage ${{ github.event.inputs.test_filter || 'tests' }} + run: TEST_INTEGRATION=1 bun x jest --coverage --maxWorkers=200% --silent ${{ github.event.inputs.test_filter || 'tests' }} env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} @@ -111,6 +113,7 @@ jobs: storybook-test: name: Storybook Interaction Tests runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }} + if: github.event.inputs.test_filter == '' steps: - name: Checkout code uses: actions/checkout@v4 @@ -136,6 +139,7 @@ jobs: e2e-test: name: End-to-End Tests runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }} + if: github.event.inputs.test_filter == '' steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.gitignore b/.gitignore index e08390f724..6314bc6351 100644 --- a/.gitignore +++ b/.gitignore @@ -104,3 +104,4 @@ tmpfork .cmux-agent-cli storybook-static/ *.tgz +src/test-workspaces/ diff --git a/bun.lock 
b/bun.lock index 8c65976937..e8a15a5ae6 100644 --- a/bun.lock +++ b/bun.lock @@ -2,7 +2,7 @@ "lockfileVersion": 1, "workspaces": { "": { - "name": "cmux", + "name": "@coder/cmux", "dependencies": { "@ai-sdk/anthropic": "^2.0.29", "@ai-sdk/openai": "^2.0.52", @@ -29,6 +29,7 @@ "markdown-it": "^14.1.0", "minimist": "^1.2.8", "rehype-harden": "^1.1.5", + "shescape": "^2.1.6", "source-map-support": "^0.5.21", "streamdown": "^1.4.0", "undici": "^7.16.0", @@ -116,11 +117,11 @@ "@adobe/css-tools": ["@adobe/css-tools@4.4.4", "", {}, "sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg=="], - "@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.29", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-kDYYgbBoeTwB+wMuQRE7iFx8dA3jv4kCSB7XtQypP7/lt1P+G1LpeIMTRbwp4wMzaZTfThZBWDCkg/OltDo2VA=="], + "@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.37", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-r2e9BWoobisH9B5b7x3yYG/k9WlsZqa4D94o7gkwktReqrjjv83zNMop4KmlJsh/zBhbsaP8S8SUfiwK+ESxgg=="], - "@ai-sdk/gateway": ["@ai-sdk/gateway@1.0.40", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12", "@vercel/oidc": "3.0.2" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-zlixM9jac0w0jjYl5gwNq+w9nydvraAmLaZQbbh+QpHU+OPkTIZmyBcKeTq5eGQKQxhi+oquHxzCSKyJx3egGw=="], + "@ai-sdk/gateway": ["@ai-sdk/gateway@2.0.0", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12", "@vercel/oidc": "3.0.3" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-Gj0PuawK7NkZuyYgO/h5kDK/l6hFOjhLdTq3/Lli1FTl47iGmwhH1IZQpAL3Z09BeFYWakcwUmn02ovIm2wy9g=="], - "@ai-sdk/openai": ["@ai-sdk/openai@2.0.52", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12" 
}, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-n1arAo4+63e6/FFE6z/1ZsZbiOl4cfsoZ3F4i2X7LPIEea786Y2yd7Qdr7AdB4HTLVo3OSb1PHVIcQmvYIhmEA=="], + "@ai-sdk/openai": ["@ai-sdk/openai@2.0.53", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GIkR3+Fyif516ftXv+YPSPstnAHhcZxNoR2s8uSHhQ1yBT7I7aQYTVwpjAuYoT3GR+TeP50q7onj2/nDRbT2FQ=="], "@ai-sdk/provider": ["@ai-sdk/provider@2.0.0", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA=="], @@ -230,9 +231,9 @@ "@electron/universal": ["@electron/universal@1.5.1", "", { "dependencies": { "@electron/asar": "^3.2.1", "@malept/cross-spawn-promise": "^1.1.0", "debug": "^4.3.1", "dir-compare": "^3.0.0", "fs-extra": "^9.0.1", "minimatch": "^3.0.4", "plist": "^3.0.4" } }, "sha512-kbgXxyEauPJiQQUNG2VgUeyfQNFk6hBF11ISN2PNI6agUgPl55pv4eQmaqHzTAzchBvqZ2tQuRVaPStGf0mxGw=="], - "@emnapi/core": ["@emnapi/core@1.5.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } }, "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg=="], + "@emnapi/core": ["@emnapi/core@1.6.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } }, "sha512-zq/ay+9fNIJJtJiZxdTnXS20PllcYMX3OE23ESc4HK/bdYu3cOWYVhsOhVnXALfU/uqJIxn5NBPd9z4v+SfoSg=="], - "@emnapi/runtime": ["@emnapi/runtime@1.5.0", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ=="], + "@emnapi/runtime": ["@emnapi/runtime@1.6.0", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA=="], "@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.1.0", "", { "dependencies": { "tslib": "^2.4.0" } }, 
"sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ=="], @@ -290,19 +291,19 @@ "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.9.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g=="], - "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], + "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.2", "", {}, "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew=="], - "@eslint/config-array": ["@eslint/config-array@0.21.0", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ=="], + "@eslint/config-array": ["@eslint/config-array@0.21.1", "", { "dependencies": { "@eslint/object-schema": "^2.1.7", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA=="], - "@eslint/config-helpers": ["@eslint/config-helpers@0.4.0", "", { "dependencies": { "@eslint/core": "^0.16.0" } }, "sha512-WUFvV4WoIwW8Bv0KeKCIIEgdSiFOsulyN0xrMu+7z43q/hkOLXjvb5u7UC9jDxvRzcrbEmuZBX5yJZz1741jog=="], + "@eslint/config-helpers": ["@eslint/config-helpers@0.4.1", "", { "dependencies": { "@eslint/core": "^0.16.0" } }, "sha512-csZAzkNhsgwb0I/UAV6/RGFTbiakPCf0ZrGmrIxQpYvGZ00PhTkSnyKNolphgIvmnJeGw6rcGVEXfTzUnFuEvw=="], "@eslint/core": ["@eslint/core@0.16.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q=="], "@eslint/eslintrc": ["@eslint/eslintrc@3.3.1", "", { 
"dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="], - "@eslint/js": ["@eslint/js@9.37.0", "", {}, "sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg=="], + "@eslint/js": ["@eslint/js@9.38.0", "", {}, "sha512-UZ1VpFvXf9J06YG9xQBdnzU+kthors6KjhMAl6f4gH4usHyh31rUf2DLGInT8RFYIReYXNSydgPY0V2LuWgl7A=="], - "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], + "@eslint/object-schema": ["@eslint/object-schema@2.1.7", "", {}, "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA=="], "@eslint/plugin-kit": ["@eslint/plugin-kit@0.4.0", "", { "dependencies": { "@eslint/core": "^0.16.0", "levn": "^0.4.1" } }, "sha512-sB5uyeq+dwCWyPi31B2gQlVlo+j5brPlWx4yZBrEaRo/nhdDE8Xke1gsGgtiBdaBTxuTkceLVuVt/pclrasb0A=="], @@ -408,9 +409,9 @@ "@pkgr/core": ["@pkgr/core@0.2.9", "", {}, "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA=="], - "@playwright/test": ["@playwright/test@1.56.0", "", { "dependencies": { "playwright": "1.56.0" }, "bin": { "playwright": "cli.js" } }, "sha512-Tzh95Twig7hUwwNe381/K3PggZBZblKUe2wv25oIpzWLr6Z0m4KgV1ZVIjnR6GM9ANEqjZD7XsZEa6JL/7YEgg=="], + "@playwright/test": ["@playwright/test@1.56.1", "", { "dependencies": { "playwright": "1.56.1" }, "bin": { "playwright": "cli.js" } }, "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg=="], - "@posthog/core": ["@posthog/core@1.3.0", "", {}, "sha512-hxLL8kZNHH098geedcxCz8y6xojkNYbmJEW+1vFXsmPcExyCXIUUJ/34X6xa9GcprKxd0Wsx3vfJQLQX4iVPhw=="], + "@posthog/core": 
["@posthog/core@1.3.1", "", {}, "sha512-sGKVHituJ8L/bJxVV4KamMFp+IBWAZyCiYunFawJZ4cc59PCtLnKFIMEV6kn7A4eZQcQ6EKV5Via4sF3Z7qMLQ=="], "@radix-ui/number": ["@radix-ui/number@1.1.1", "", {}, "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g=="], @@ -684,35 +685,35 @@ "@szmarczak/http-timer": ["@szmarczak/http-timer@4.0.6", "", { "dependencies": { "defer-to-connect": "^2.0.0" } }, "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w=="], - "@tailwindcss/node": ["@tailwindcss/node@4.1.15", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.0", "lightningcss": "1.30.2", "magic-string": "^0.30.19", "source-map-js": "^1.2.1", "tailwindcss": "4.1.15" } }, "sha512-HF4+7QxATZWY3Jr8OlZrBSXmwT3Watj0OogeDvdUY/ByXJHQ+LBtqA2brDb3sBxYslIFx6UP94BJ4X6a4L9Bmw=="], + "@tailwindcss/node": ["@tailwindcss/node@4.1.16", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.1", "lightningcss": "1.30.2", "magic-string": "^0.30.19", "source-map-js": "^1.2.1", "tailwindcss": "4.1.16" } }, "sha512-BX5iaSsloNuvKNHRN3k2RcCuTEgASTo77mofW0vmeHkfrDWaoFAFvNHpEgtu0eqyypcyiBkDWzSMxJhp3AUVcw=="], - "@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.15", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.15", "@tailwindcss/oxide-darwin-arm64": "4.1.15", "@tailwindcss/oxide-darwin-x64": "4.1.15", "@tailwindcss/oxide-freebsd-x64": "4.1.15", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.15", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.15", "@tailwindcss/oxide-linux-arm64-musl": "4.1.15", "@tailwindcss/oxide-linux-x64-gnu": "4.1.15", "@tailwindcss/oxide-linux-x64-musl": "4.1.15", "@tailwindcss/oxide-wasm32-wasi": "4.1.15", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.15", "@tailwindcss/oxide-win32-x64-msvc": "4.1.15" } }, 
"sha512-krhX+UOOgnsUuks2SR7hFafXmLQrKxB4YyRTERuCE59JlYL+FawgaAlSkOYmDRJdf1Q+IFNDMl9iRnBW7QBDfQ=="], + "@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.16", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.16", "@tailwindcss/oxide-darwin-arm64": "4.1.16", "@tailwindcss/oxide-darwin-x64": "4.1.16", "@tailwindcss/oxide-freebsd-x64": "4.1.16", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.16", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.16", "@tailwindcss/oxide-linux-arm64-musl": "4.1.16", "@tailwindcss/oxide-linux-x64-gnu": "4.1.16", "@tailwindcss/oxide-linux-x64-musl": "4.1.16", "@tailwindcss/oxide-wasm32-wasi": "4.1.16", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.16", "@tailwindcss/oxide-win32-x64-msvc": "4.1.16" } }, "sha512-2OSv52FRuhdlgyOQqgtQHuCgXnS8nFSYRp2tJ+4WZXKgTxqPy7SMSls8c3mPT5pkZ17SBToGM5LHEJBO7miEdg=="], - "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.15", "", { "os": "android", "cpu": "arm64" }, "sha512-TkUkUgAw8At4cBjCeVCRMc/guVLKOU1D+sBPrHt5uVcGhlbVKxrCaCW9OKUIBv1oWkjh4GbunD/u/Mf0ql6kEA=="], + "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.16", "", { "os": "android", "cpu": "arm64" }, "sha512-8+ctzkjHgwDJ5caq9IqRSgsP70xhdhJvm+oueS/yhD5ixLhqTw9fSL1OurzMUhBwE5zK26FXLCz2f/RtkISqHA=="], - "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.15", "", { "os": "darwin", "cpu": "arm64" }, "sha512-xt5XEJpn2piMSfvd1UFN6jrWXyaKCwikP4Pidcf+yfHTSzSpYhG3dcMktjNkQO3JiLCp+0bG0HoWGvz97K162w=="], + "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.16", "", { "os": "darwin", "cpu": "arm64" }, "sha512-C3oZy5042v2FOALBZtY0JTDnGNdS6w7DxL/odvSny17ORUnaRKhyTse8xYi3yKGyfnTUOdavRCdmc8QqJYwFKA=="], - "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.15", "", { "os": "darwin", "cpu": "x64" }, "sha512-TnWaxP6Bx2CojZEXAV2M01Yl13nYPpp0EtGpUrY+LMciKfIXiLL2r/SiSRpagE5Fp2gX+rflp/Os1VJDAyqymg=="], + "@tailwindcss/oxide-darwin-x64": 
["@tailwindcss/oxide-darwin-x64@4.1.16", "", { "os": "darwin", "cpu": "x64" }, "sha512-vjrl/1Ub9+JwU6BP0emgipGjowzYZMjbWCDqwA2Z4vCa+HBSpP4v6U2ddejcHsolsYxwL5r4bPNoamlV0xDdLg=="], - "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.15", "", { "os": "freebsd", "cpu": "x64" }, "sha512-quISQDWqiB6Cqhjc3iWptXVZHNVENsWoI77L1qgGEHNIdLDLFnw3/AfY7DidAiiCIkGX/MjIdB3bbBZR/G2aJg=="], + "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.16", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TSMpPYpQLm+aR1wW5rKuUuEruc/oOX3C7H0BTnPDn7W/eMw8W+MRMpiypKMkXZfwH8wqPIRKppuZoedTtNj2tg=="], - "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.15", "", { "os": "linux", "cpu": "arm" }, "sha512-ObG76+vPlab65xzVUQbExmDU9FIeYLQ5k2LrQdR2Ud6hboR+ZobXpDoKEYXf/uOezOfIYmy2Ta3w0ejkTg9yxg=="], + "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.16", "", { "os": "linux", "cpu": "arm" }, "sha512-p0GGfRg/w0sdsFKBjMYvvKIiKy/LNWLWgV/plR4lUgrsxFAoQBFrXkZ4C0w8IOXfslB9vHK/JGASWD2IefIpvw=="], - "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.15", "", { "os": "linux", "cpu": "arm64" }, "sha512-4WbBacRmk43pkb8/xts3wnOZMDKsPFyEH/oisCm2q3aLZND25ufvJKcDUpAu0cS+CBOL05dYa8D4U5OWECuH/Q=="], + "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.16", "", { "os": "linux", "cpu": "arm64" }, "sha512-DoixyMmTNO19rwRPdqviTrG1rYzpxgyYJl8RgQvdAQUzxC1ToLRqtNJpU/ATURSKgIg6uerPw2feW0aS8SNr/w=="], - "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.15", "", { "os": "linux", "cpu": "arm64" }, "sha512-AbvmEiteEj1nf42nE8skdHv73NoR+EwXVSgPY6l39X12Ex8pzOwwfi3Kc8GAmjsnsaDEbk+aj9NyL3UeyHcTLg=="], + "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.16", "", { "os": "linux", "cpu": "arm64" }, "sha512-H81UXMa9hJhWhaAUca6bU2wm5RRFpuHImrwXBUvPbYb+3jo32I9VIwpOX6hms0fPmA6f2pGVlybO6qU8pF4fzQ=="], - 
"@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.15", "", { "os": "linux", "cpu": "x64" }, "sha512-+rzMVlvVgrXtFiS+ES78yWgKqpThgV19ISKD58Ck+YO5pO5KjyxLt7AWKsWMbY0R9yBDC82w6QVGz837AKQcHg=="], + "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.16", "", { "os": "linux", "cpu": "x64" }, "sha512-ZGHQxDtFC2/ruo7t99Qo2TTIvOERULPl5l0K1g0oK6b5PGqjYMga+FcY1wIUnrUxY56h28FxybtDEla+ICOyew=="], - "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.15", "", { "os": "linux", "cpu": "x64" }, "sha512-fPdEy7a8eQN9qOIK3Em9D3TO1z41JScJn8yxl/76mp4sAXFDfV4YXxsiptJcOwy6bGR+70ZSwFIZhTXzQeqwQg=="], + "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.16", "", { "os": "linux", "cpu": "x64" }, "sha512-Oi1tAaa0rcKf1Og9MzKeINZzMLPbhxvm7rno5/zuP1WYmpiG0bEHq4AcRUiG2165/WUzvxkW4XDYCscZWbTLZw=="], - "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.15", "", { "dependencies": { "@emnapi/core": "^1.5.0", "@emnapi/runtime": "^1.5.0", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.0.7", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.4.0" }, "cpu": "none" }, "sha512-sJ4yd6iXXdlgIMfIBXuVGp/NvmviEoMVWMOAGxtxhzLPp9LOj5k0pMEMZdjeMCl4C6Up+RM8T3Zgk+BMQ0bGcQ=="], + "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.16", "", { "dependencies": { "@emnapi/core": "^1.5.0", "@emnapi/runtime": "^1.5.0", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.0.7", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.4.0" }, "cpu": "none" }, "sha512-B01u/b8LteGRwucIBmCQ07FVXLzImWESAIMcUU6nvFt/tYsQ6IHz8DmZ5KtvmwxD+iTYBtM1xwoGXswnlu9v0Q=="], - "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.15", "", { "os": "win32", "cpu": "arm64" }, "sha512-sJGE5faXnNQ1iXeqmRin7Ds/ru2fgCiaQZQQz3ZGIDtvbkeV85rAZ0QJFMDg0FrqsffZG96H1U9AQlNBRLsHVg=="], + "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.16", "", { "os": 
"win32", "cpu": "arm64" }, "sha512-zX+Q8sSkGj6HKRTMJXuPvOcP8XfYON24zJBRPlszcH1Np7xuHXhWn8qfFjIujVzvH3BHU+16jBXwgpl20i+v9A=="], - "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.15", "", { "os": "win32", "cpu": "x64" }, "sha512-NLeHE7jUV6HcFKS504bpOohyi01zPXi2PXmjFfkzTph8xRxDdxkRsXm/xDO5uV5K3brrE1cCwbUYmFUSHR3u1w=="], + "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.16", "", { "os": "win32", "cpu": "x64" }, "sha512-m5dDFJUEejbFqP+UXVstd4W/wnxA4F61q8SoL+mqTypId2T2ZpuxosNSgowiCnLp2+Z+rivdU0AqpfgiD7yCBg=="], - "@tailwindcss/vite": ["@tailwindcss/vite@4.1.15", "", { "dependencies": { "@tailwindcss/node": "4.1.15", "@tailwindcss/oxide": "4.1.15", "tailwindcss": "4.1.15" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-B6s60MZRTUil+xKoZoGe6i0Iar5VuW+pmcGlda2FX+guDuQ1G1sjiIy1W0frneVpeL/ZjZ4KEgWZHNrIm++2qA=="], + "@tailwindcss/vite": ["@tailwindcss/vite@4.1.16", "", { "dependencies": { "@tailwindcss/node": "4.1.16", "@tailwindcss/oxide": "4.1.16", "tailwindcss": "4.1.16" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-bbguNBcDxsRmi9nnlWJxhfDWamY3lmcyACHcdO1crxfzuLpOhHLLtEIN/nCbbAtj5rchUgQD17QVAKi1f7IsKg=="], "@testing-library/dom": ["@testing-library/dom@10.4.0", "", { "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", "@types/aria-query": "^5.0.1", "aria-query": "5.3.0", "chalk": "^4.1.0", "dom-accessibility-api": "^0.5.9", "lz-string": "^1.5.0", "pretty-format": "^27.0.2" } }, "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ=="], @@ -866,9 +867,7 @@ "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], - "@types/node": ["@types/node@24.7.2", "", { "dependencies": { "undici-types": "~7.14.0" } }, "sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA=="], - - "@types/parse-json": 
["@types/parse-json@4.0.2", "", {}, "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw=="], + "@types/node": ["@types/node@24.9.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-QoiaXANRkSXK6p0Duvt56W208du4P9Uye9hWLWgGMDTEoKPhuenzNcC4vGUmrNkiOKTlIrBoyNQYNpSwfEZXSg=="], "@types/plist": ["@types/plist@3.0.5", "", { "dependencies": { "@types/node": "*", "xmlbuilder": ">=11.0.1" } }, "sha512-E6OCaRmAe4WDmWNsL/9RMqdkkzDCY1etutkflWk4c+AcjDU07Pcz1fQwTX0TQz+Pxqn9i4L1TU3UFpjnrcDgxA=="], @@ -912,41 +911,41 @@ "@types/yauzl": ["@types/yauzl@2.10.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.46.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.46.1", "@typescript-eslint/type-utils": "8.46.1", "@typescript-eslint/utils": "8.46.1", "@typescript-eslint/visitor-keys": "8.46.1", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.46.1", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-rUsLh8PXmBjdiPY+Emjz9NX2yHvhS11v0SR6xNJkm5GM1MO9ea/1GoDKlHHZGrOJclL/cZ2i/vRUYVtjRhrHVQ=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.46.2", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.46.2", "@typescript-eslint/type-utils": "8.46.2", "@typescript-eslint/utils": "8.46.2", "@typescript-eslint/visitor-keys": "8.46.2", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.46.2", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, 
"sha512-ZGBMToy857/NIPaaCucIUQgqueOiq7HeAKkhlvqVV4lm089zUFW6ikRySx2v+cAhKeUCPuWVHeimyk6Dw1iY3w=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.46.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.46.1", "@typescript-eslint/types": "8.46.1", "@typescript-eslint/typescript-estree": "8.46.1", "@typescript-eslint/visitor-keys": "8.46.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-6JSSaBZmsKvEkbRUkf7Zj7dru/8ZCrJxAqArcLaVMee5907JdtEbKGsZ7zNiIm/UAkpGUkaSMZEXShnN2D1HZA=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.46.2", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.46.2", "@typescript-eslint/types": "8.46.2", "@typescript-eslint/typescript-estree": "8.46.2", "@typescript-eslint/visitor-keys": "8.46.2", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-BnOroVl1SgrPLywqxyqdJ4l3S2MsKVLDVxZvjI1Eoe8ev2r3kGDo+PcMihNmDE+6/KjkTubSJnmqGZZjQSBq/g=="], - "@typescript-eslint/project-service": ["@typescript-eslint/project-service@8.46.1", "", { "dependencies": { "@typescript-eslint/tsconfig-utils": "^8.46.1", "@typescript-eslint/types": "^8.46.1", "debug": "^4.3.4" }, "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-FOIaFVMHzRskXr5J4Jp8lFVV0gz5ngv3RHmn+E4HYxSJ3DgDzU7fVI1/M7Ijh1zf6S7HIoaIOtln1H5y8V+9Zg=="], + "@typescript-eslint/project-service": ["@typescript-eslint/project-service@8.46.2", "", { "dependencies": { "@typescript-eslint/tsconfig-utils": "^8.46.2", "@typescript-eslint/types": "^8.46.2", "debug": "^4.3.4" }, "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-PULOLZ9iqwI7hXcmL4fVfIsBi6AN9YxRc0frbvmg8f+4hQAjQ5GYNKK0DIArNo+rOKmR/iBYwkpBmnIwin4wBg=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.46.1", "", { "dependencies": { "@typescript-eslint/types": "8.46.1", "@typescript-eslint/visitor-keys": 
"8.46.1" } }, "sha512-weL9Gg3/5F0pVQKiF8eOXFZp8emqWzZsOJuWRUNtHT+UNV2xSJegmpCNQHy37aEQIbToTq7RHKhWvOsmbM680A=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.46.2", "", { "dependencies": { "@typescript-eslint/types": "8.46.2", "@typescript-eslint/visitor-keys": "8.46.2" } }, "sha512-LF4b/NmGvdWEHD2H4MsHD8ny6JpiVNDzrSZr3CsckEgCbAGZbYM4Cqxvi9L+WqDMT+51Ozy7lt2M+d0JLEuBqA=="], - "@typescript-eslint/tsconfig-utils": ["@typescript-eslint/tsconfig-utils@8.46.1", "", { "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-X88+J/CwFvlJB+mK09VFqx5FE4H5cXD+H/Bdza2aEWkSb8hnWIQorNcscRl4IEo1Cz9VI/+/r/jnGWkbWPx54g=="], + "@typescript-eslint/tsconfig-utils": ["@typescript-eslint/tsconfig-utils@8.46.2", "", { "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-a7QH6fw4S57+F5y2FIxxSDyi5M4UfGF+Jl1bCGd7+L4KsaUY80GsiF/t0UoRFDHAguKlBaACWJRmdrc6Xfkkag=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.46.1", "", { "dependencies": { "@typescript-eslint/types": "8.46.1", "@typescript-eslint/typescript-estree": "8.46.1", "@typescript-eslint/utils": "8.46.1", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-+BlmiHIiqufBxkVnOtFwjah/vrkF4MtKKvpXrKSPLCkCtAp8H01/VV43sfqA98Od7nJpDcFnkwgyfQbOG0AMvw=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.46.2", "", { "dependencies": { "@typescript-eslint/types": "8.46.2", "@typescript-eslint/typescript-estree": "8.46.2", "@typescript-eslint/utils": "8.46.2", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-HbPM4LbaAAt/DjxXaG9yiS9brOOz6fabal4uvUmaUYe6l3K1phQDMQKBRUrr06BQkxkvIZVVHttqiybM9nJsLA=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.46.1", "", {}, 
"sha512-C+soprGBHwWBdkDpbaRC4paGBrkIXxVlNohadL5o0kfhsXqOC6GYH2S/Obmig+I0HTDl8wMaRySwrfrXVP8/pQ=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.46.2", "", {}, "sha512-lNCWCbq7rpg7qDsQrd3D6NyWYu+gkTENkG5IKYhUIcxSb59SQC/hEQ+MrG4sTgBVghTonNWq42bA/d4yYumldQ=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.46.1", "", { "dependencies": { "@typescript-eslint/project-service": "8.46.1", "@typescript-eslint/tsconfig-utils": "8.46.1", "@typescript-eslint/types": "8.46.1", "@typescript-eslint/visitor-keys": "8.46.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-uIifjT4s8cQKFQ8ZBXXyoUODtRoAd7F7+G8MKmtzj17+1UbdzFl52AzRyZRyKqPHhgzvXunnSckVu36flGy8cg=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.46.2", "", { "dependencies": { "@typescript-eslint/project-service": "8.46.2", "@typescript-eslint/tsconfig-utils": "8.46.2", "@typescript-eslint/types": "8.46.2", "@typescript-eslint/visitor-keys": "8.46.2", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <6.0.0" } }, "sha512-f7rW7LJ2b7Uh2EiQ+7sza6RDZnajbNbemn54Ob6fRwQbgcIn+GWfyuHDHRYgRoZu1P4AayVScrRW+YfbTvPQoQ=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.46.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.46.1", "@typescript-eslint/types": "8.46.1", "@typescript-eslint/typescript-estree": "8.46.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-vkYUy6LdZS7q1v/Gxb2Zs7zziuXN0wxqsetJdeZdRe/f5dwJFglmuvZBfTUivCtjH725C1jWCDfpadadD95EDQ=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.46.2", "", { "dependencies": { 
"@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.46.2", "@typescript-eslint/types": "8.46.2", "@typescript-eslint/typescript-estree": "8.46.2" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-sExxzucx0Tud5tE0XqR0lT0psBQvEpnpiul9XbGUB1QwpWJJAps1O/Z7hJxLGiZLBKMCutjTzDgmd1muEhBnVg=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.46.1", "", { "dependencies": { "@typescript-eslint/types": "8.46.1", "eslint-visitor-keys": "^4.2.1" } }, "sha512-ptkmIf2iDkNUjdeu2bQqhFPV1m6qTnFFjg7PPDjxKWaMaP0Z6I9l30Jr3g5QqbZGdw8YdYvLp+XnqnWWZOg/NA=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.46.2", "", { "dependencies": { "@typescript-eslint/types": "8.46.2", "eslint-visitor-keys": "^4.2.1" } }, "sha512-tUFMXI4gxzzMXt4xpGJEsBsTox0XbNQ1y94EwlD/CuZwFcQP79xfQqMhau9HsRc/J0cAPA/HZt1dZPtGn9V/7w=="], - "@typescript/native-preview": ["@typescript/native-preview@7.0.0-dev.20251015.1", "", { "optionalDependencies": { "@typescript/native-preview-darwin-arm64": "7.0.0-dev.20251015.1", "@typescript/native-preview-darwin-x64": "7.0.0-dev.20251015.1", "@typescript/native-preview-linux-arm": "7.0.0-dev.20251015.1", "@typescript/native-preview-linux-arm64": "7.0.0-dev.20251015.1", "@typescript/native-preview-linux-x64": "7.0.0-dev.20251015.1", "@typescript/native-preview-win32-arm64": "7.0.0-dev.20251015.1", "@typescript/native-preview-win32-x64": "7.0.0-dev.20251015.1" }, "bin": { "tsgo": "bin/tsgo.js" } }, "sha512-QNNVpnjvJJ5yVZf2v4vHT/fK2mAzE5VC5m4mYI+aboT0Dlt4ZgPkYs/CodG+NIsGce8fkEs7hZNk8W4RFf7biw=="], + "@typescript/native-preview": ["@typescript/native-preview@7.0.0-dev.20251023.1", "", { "optionalDependencies": { "@typescript/native-preview-darwin-arm64": "7.0.0-dev.20251023.1", "@typescript/native-preview-darwin-x64": "7.0.0-dev.20251023.1", "@typescript/native-preview-linux-arm": "7.0.0-dev.20251023.1", "@typescript/native-preview-linux-arm64": 
"7.0.0-dev.20251023.1", "@typescript/native-preview-linux-x64": "7.0.0-dev.20251023.1", "@typescript/native-preview-win32-arm64": "7.0.0-dev.20251023.1", "@typescript/native-preview-win32-x64": "7.0.0-dev.20251023.1" }, "bin": { "tsgo": "bin/tsgo.js" } }, "sha512-vR8Hhj/6XYWzq+MquAncZeXjNdmncT3Jf5avdrMIWHYnmjWqcHtIX61NM3N32k2vcfoGfiHZgMGN4BCYmlmp0Q=="], - "@typescript/native-preview-darwin-arm64": ["@typescript/native-preview-darwin-arm64@7.0.0-dev.20251015.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-nX3IvW3zVZItG6BWkSmQlyNiq23obmSU+S+Yp0bN6elR+S+yLWssutb1f8mmjOVx8zZVIB0PHuzeiTb3a89aEA=="], + "@typescript/native-preview-darwin-arm64": ["@typescript/native-preview-darwin-arm64@7.0.0-dev.20251023.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Qe8KKzhe+bEn84+c90DPBYMkLZ1Q6709DmxStlhdSJycO4GAXlURcLyFAegbLGUPen2oU1NISFlCuOoGUDufvw=="], - "@typescript/native-preview-darwin-x64": ["@typescript/native-preview-darwin-x64@7.0.0-dev.20251015.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-cqqqChfieGbtuDbWSDZKjMz/SDlt2B0XY1rdGS3HNzHocpxYHg5cKQGGddQxwSQp/OdeRpkpEzfvRsbpWnv/ig=="], + "@typescript/native-preview-darwin-x64": ["@typescript/native-preview-darwin-x64@7.0.0-dev.20251023.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-1WDzpaluw8y4qfOGTyAFHRskEcg/qPSYQwkDj3jw9lLpYwhXo6uqZ7TmPEX9QhzjtUvmMCnqq4hvwPN/0e3h8Q=="], - "@typescript/native-preview-linux-arm": ["@typescript/native-preview-linux-arm@7.0.0-dev.20251015.1", "", { "os": "linux", "cpu": "arm" }, "sha512-T1utGfiJ4auwPF+aOXGtJauEvyCMCSd2reGsv0P9vnE5YeJheopZ6VTtmvYkN9IsIHBvX+BLbOv4Gr3zubAY+w=="], + "@typescript/native-preview-linux-arm": ["@typescript/native-preview-linux-arm@7.0.0-dev.20251023.1", "", { "os": "linux", "cpu": "arm" }, "sha512-Q/GxNqqqN3LNVayrWrcdV8aB1tzDbAPWeYqpvAeJpaeioIPXpcA+nqmw9yLkgCQbWMD/YA2Dum8otWtYP6sUyQ=="], - "@typescript/native-preview-linux-arm64": ["@typescript/native-preview-linux-arm64@7.0.0-dev.20251015.1", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-iL6uD3P4NtBslegrtxPRcobbg+PkKnck+AD7lLT/KGfNXy0vB5touFdNhWY+FoaahSTyAYuS6Fo2F/FzdzzLkw=="], + "@typescript/native-preview-linux-arm64": ["@typescript/native-preview-linux-arm64@7.0.0-dev.20251023.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-Q4jcLjgP6GyUBFNgM9bQX5Scsq+RYFVEXkwC1a0f7Jpz8u3qzWz9VRJNzubHcXqFzCGbru0YPN5bZMylNOlP+g=="], - "@typescript/native-preview-linux-x64": ["@typescript/native-preview-linux-x64@7.0.0-dev.20251015.1", "", { "os": "linux", "cpu": "x64" }, "sha512-xGE8apymvrvMrV9Vt3t8nqD/xcoiC/gCgbxrFr9xM7WkoCre7ZMUbTsiSwORpgj8ELKszgGsAaNwZY6RcI2sLA=="], + "@typescript/native-preview-linux-x64": ["@typescript/native-preview-linux-x64@7.0.0-dev.20251023.1", "", { "os": "linux", "cpu": "x64" }, "sha512-JH5LJMcUPWuCBPgrGybSSKoM4ktpBgxIBCLhunpL0z9vMxHOAXMbfLFu8cdM8X+rr6H+C0IDi/mEvUqMNOvlsA=="], - "@typescript/native-preview-win32-arm64": ["@typescript/native-preview-win32-arm64@7.0.0-dev.20251015.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-QxIR7d/xLLYTLXa7UMtxb/m0jB18UNK1FhHiHFUy6udjrVlfPmcXOIv4TUZxHGFx00I2QWNzySWd5DQOs8jllQ=="], + "@typescript/native-preview-win32-arm64": ["@typescript/native-preview-win32-arm64@7.0.0-dev.20251023.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-8n/uGR9pwkf3VO8Pok/0TOo0SUyDRlFdE7WWgundGz+X3rlSZYdi7fI9mFYmnSSFOOB7gKbiE0fFFSTIcDY36Q=="], - "@typescript/native-preview-win32-x64": ["@typescript/native-preview-win32-x64@7.0.0-dev.20251015.1", "", { "os": "win32", "cpu": "x64" }, "sha512-vir9fC7vfpPP3xWgHZnK/GPqCwFRUCCOw8sKtXgGVf1EQcKo/H+pzCMlRTGdmHoGRBEI7eSyTn0fnQcKcnMymg=="], + "@typescript/native-preview-win32-x64": ["@typescript/native-preview-win32-x64@7.0.0-dev.20251023.1", "", { "os": "win32", "cpu": "x64" }, "sha512-GUz7HU6jSUwHEFauwrtdsXdbOVEQ0qv0Jaz3HJeUx+DrmU8Zl+FM1weOyq1GXmFDjw3dzzR5yIxCld3M3SMT6Q=="], "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="], @@ -988,7 +987,7 @@ 
"@unrs/resolver-binding-win32-x64-msvc": ["@unrs/resolver-binding-win32-x64-msvc@1.11.1", "", { "os": "win32", "cpu": "x64" }, "sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g=="], - "@vercel/oidc": ["@vercel/oidc@3.0.2", "", {}, "sha512-JekxQ0RApo4gS4un/iMGsIL1/k4KUBe3HmnGcDvzHuFBdQdudEJgTqcsJC7y6Ul4Yw5CeykgvQbX2XeEJd0+DA=="], + "@vercel/oidc": ["@vercel/oidc@3.0.3", "", {}, "sha512-yNEQvPcVrK9sIe637+I0jD6leluPxzwJKx/Haw6F4H77CdDsszUn5V3o96LPziXkSNE2B83+Z3mjqGKBK/R6Gg=="], "@vitejs/plugin-react": ["@vitejs/plugin-react@4.7.0", "", { "dependencies": { "@babel/core": "^7.28.0", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.27", "@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA=="], @@ -1012,7 +1011,7 @@ "aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="], - "ai": ["ai@5.0.72", "", { "dependencies": { "@ai-sdk/gateway": "1.0.40", "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LB4APrlESLGHG/5x+VVdl0yYPpHPHpnGd5Gwl7AWVL+n7T0GYsNos/S/6dZ5CZzxLnPPEBkRgvJC4rupeZqyNg=="], + "ai": ["ai@5.0.77", "", { "dependencies": { "@ai-sdk/gateway": "2.0.0", "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.12", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-w0xP/guV27qLUR+60ru7dSDfF1Wlk6lPEHtXPBLfa8TNQ8Qc4FZ1RE9UGAdZmZU396FA6lKtP9P89Jzb5Z+Hnw=="], "ai-tokenizer": ["ai-tokenizer@1.0.3", "", { "peerDependencies": { "ai": 
"^5.0.0" }, "optionalPeers": ["ai"] }, "sha512-S2AQmQclsFVo79cu6FRGXwFQ0/0g+uqiEHLDvK7KLTUt8BdBE1Sf9oMnH5xBw2zxUmFWRx91GndvwyW6pw+hHw=="], @@ -1092,8 +1091,6 @@ "babel-plugin-jest-hoist": ["babel-plugin-jest-hoist@30.2.0", "", { "dependencies": { "@types/babel__core": "^7.20.5" } }, "sha512-ftzhzSGMUnOzcCXd6WHdBGMyuwy15Wnn0iyyWGKgBDLxf9/s5ABuraCSpBX2uG0jUg4rqJnxsLc5+oYBqoxVaA=="], - "babel-plugin-macros": ["babel-plugin-macros@3.1.0", "", { "dependencies": { "@babel/runtime": "^7.12.5", "cosmiconfig": "^7.0.0", "resolve": "^1.19.0" } }, "sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg=="], - "babel-plugin-react-compiler": ["babel-plugin-react-compiler@1.0.0", "", { "dependencies": { "@babel/types": "^7.26.0" } }, "sha512-Ixm8tFfoKKIPYdCCKYTsqv+Fd4IJ0DQqMyEimo+pxUOMUR9cVPlwTrFt9Avu+3cb6Zp3mAzl+t1MrG2fxxKsxw=="], "babel-preset-current-node-syntax": ["babel-preset-current-node-syntax@1.2.0", "", { "dependencies": { "@babel/plugin-syntax-async-generators": "^7.8.4", "@babel/plugin-syntax-bigint": "^7.8.3", "@babel/plugin-syntax-class-properties": "^7.12.13", "@babel/plugin-syntax-class-static-block": "^7.14.5", "@babel/plugin-syntax-import-attributes": "^7.24.7", "@babel/plugin-syntax-import-meta": "^7.10.4", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", "@babel/plugin-syntax-numeric-separator": "^7.10.4", "@babel/plugin-syntax-object-rest-spread": "^7.8.3", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", "@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5" }, "peerDependencies": { "@babel/core": "^7.0.0 || ^8.0.0-0" } }, "sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg=="], @@ -1106,7 +1103,7 @@ "base64-js": ["base64-js@1.5.1", "", {}, 
"sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], - "baseline-browser-mapping": ["baseline-browser-mapping@2.8.16", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-OMu3BGQ4E7P1ErFsIPpbJh0qvDudM/UuJeHgkAvfWe+0HFJCXh+t/l8L6fVLR55RI/UbKrVLnAXZSVwd9ysWYw=="], + "baseline-browser-mapping": ["baseline-browser-mapping@2.8.20", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-JMWsdF+O8Orq3EMukbUN1QfbLK9mX2CkUmQBcW2T0s8OmdAUL5LLM/6wFwSrqXzlXB13yhyK9gTKS1rIizOduQ=="], "better-opn": ["better-opn@3.0.2", "", { "dependencies": { "open": "^8.0.4" } }, "sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ=="], @@ -1128,7 +1125,7 @@ "browser-assert": ["browser-assert@1.2.1", "", {}, "sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ=="], - "browserslist": ["browserslist@4.26.3", "", { "dependencies": { "baseline-browser-mapping": "^2.8.9", "caniuse-lite": "^1.0.30001746", "electron-to-chromium": "^1.5.227", "node-releases": "^2.0.21", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, "sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w=="], + "browserslist": ["browserslist@4.27.0", "", { "dependencies": { "baseline-browser-mapping": "^2.8.19", "caniuse-lite": "^1.0.30001751", "electron-to-chromium": "^1.5.238", "node-releases": "^2.0.26", "update-browserslist-db": "^1.1.4" }, "bin": { "browserslist": "cli.js" } }, "sha512-AXVQwdhot1eqLihwasPElhX2tAZiBjWdJ9i/Zcj2S6QYIjkx62OKSfnobkriB81C3l4w0rVy3Nt4jaTBltYEpw=="], "bs-logger": ["bs-logger@0.2.6", "", { "dependencies": { "fast-json-stable-stringify": "2.x" } }, "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog=="], @@ -1166,7 +1163,7 @@ "camelcase": ["camelcase@6.3.0", "", {}, 
"sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA=="], - "caniuse-lite": ["caniuse-lite@1.0.30001750", "", {}, "sha512-cuom0g5sdX6rw00qOoLNSFCJ9/mYIsuSOA+yzpDw8eopiFqcVwQvZHqov0vmEighRxX++cfC0Vg1G+1Iy/mSpQ=="], + "caniuse-lite": ["caniuse-lite@1.0.30001751", "", {}, "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw=="], "ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="], @@ -1216,7 +1213,7 @@ "co": ["co@4.6.0", "", {}, "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ=="], - "collect-v8-coverage": ["collect-v8-coverage@1.0.2", "", {}, "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q=="], + "collect-v8-coverage": ["collect-v8-coverage@1.0.3", "", {}, "sha512-1L5aqIkwPfiodaMgQunkF1zRhNqifHBmtbbbxcr6yVxxBnliw4TDOW6NxpO8DJLgJ16OT+Y4ztZqP6p/FtXnAw=="], "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], @@ -1450,7 +1447,7 @@ "ejs": ["ejs@3.1.10", "", { "dependencies": { "jake": "^10.8.5" }, "bin": { "ejs": "bin/cli.js" } }, "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA=="], - "electron": ["electron@38.3.0", "", { "dependencies": { "@electron/get": "^2.0.0", "@types/node": "^22.7.7", "extract-zip": "^2.0.1" }, "bin": { "electron": "cli.js" } }, "sha512-Wij4AzX4SAV0X/ktq+NrWrp5piTCSS8F6YWh1KAcG+QRtNzyns9XLKERP68nFHIwfprhxF2YCN2uj7nx9DaeJw=="], + "electron": ["electron@38.4.0", "", { "dependencies": { "@electron/get": "^2.0.0", "@types/node": "^22.7.7", "extract-zip": "^2.0.1" }, "bin": { "electron": "cli.js" } }, "sha512-9CsXKbGf2qpofVe2pQYSgom2E//zLDJO2rGLLbxgy9tkdTOs7000Gte+d/PUtzLjI/DS95jDK0ojYAeqjLvpYg=="], 
"electron-builder": ["electron-builder@24.13.3", "", { "dependencies": { "app-builder-lib": "24.13.3", "builder-util": "24.13.1", "builder-util-runtime": "9.2.4", "chalk": "^4.1.2", "dmg-builder": "24.13.3", "fs-extra": "^10.1.0", "is-ci": "^3.0.0", "lazy-val": "^1.0.5", "read-config-file": "6.3.2", "simple-update-notifier": "2.0.0", "yargs": "^17.6.2" }, "bin": { "electron-builder": "cli.js", "install-app-deps": "install-app-deps.js" } }, "sha512-yZSgVHft5dNVlo31qmJAe4BVKQfFdwpRw7sFp1iQglDRCDD6r22zfRJuZlhtB5gp9FHUxCMEoWGq10SkCnMAIg=="], @@ -1462,7 +1459,7 @@ "electron-publish": ["electron-publish@24.13.1", "", { "dependencies": { "@types/fs-extra": "^9.0.11", "builder-util": "24.13.1", "builder-util-runtime": "9.2.4", "chalk": "^4.1.2", "fs-extra": "^10.1.0", "lazy-val": "^1.0.5", "mime": "^2.5.2" } }, "sha512-2ZgdEqJ8e9D17Hwp5LEq5mLQPjqU3lv/IALvgp+4W8VeNhryfGhYEQC/PgDPMrnWUp+l60Ou5SJLsu+k4mhQ8A=="], - "electron-to-chromium": ["electron-to-chromium@1.5.237", "", {}, "sha512-icUt1NvfhGLar5lSWH3tHNzablaA5js3HVHacQimfP8ViEBOQv+L7DKEuHdbTZ0SKCO1ogTJTIL1Gwk9S6Qvcg=="], + "electron-to-chromium": ["electron-to-chromium@1.5.239", "", {}, "sha512-1y5w0Zsq39MSPmEjHjbizvhYoTaulVtivpxkp5q5kaPmQtsK6/2nvAzGRxNMS9DoYySp9PkW0MAQDwU1m764mg=="], "electron-updater": ["electron-updater@6.6.2", "", { "dependencies": { "builder-util-runtime": "9.3.1", "fs-extra": "^10.1.0", "js-yaml": "^4.1.0", "lazy-val": "^1.0.5", "lodash.escaperegexp": "^4.1.2", "lodash.isequal": "^4.5.0", "semver": "^7.6.3", "tiny-typed-emitter": "^2.1.0" } }, "sha512-Cr4GDOkbAUqRHP5/oeOmH/L2Bn6+FQPxVLZtPbcmKZC63a1F3uu5EefYOssgZXG3u/zBlubbJ5PJdITdMVggbw=="], @@ -1512,7 +1509,7 @@ "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - "eslint": ["eslint@9.37.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.0", 
"@eslint/config-helpers": "^0.4.0", "@eslint/core": "^0.16.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.37.0", "@eslint/plugin-kit": "^0.4.0", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.4.0", "eslint-visitor-keys": "^4.2.1", "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-XyLmROnACWqSxiGYArdef1fItQd47weqB7iwtfr9JHwRrqIXZdcFMvvEcL9xHCmL0SNsOvF0c42lWyM1U5dgig=="], + "eslint": ["eslint@9.38.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.1", "@eslint/config-helpers": "^0.4.1", "@eslint/core": "^0.16.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.38.0", "@eslint/plugin-kit": "^0.4.0", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.4.0", "eslint-visitor-keys": "^4.2.1", "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", 
"lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-t5aPOpmtJcZcz5UJyY2GbvpDlsK5E8JqRqoKtfiKE3cNh437KIqfJr3A3AKf5k64NPx6d0G3dno6XDY05PqPtw=="], "eslint-plugin-react": ["eslint-plugin-react@7.37.5", "", { "dependencies": { "array-includes": "^3.1.8", "array.prototype.findlast": "^1.2.5", "array.prototype.flatmap": "^1.3.3", "array.prototype.tosorted": "^1.1.4", "doctrine": "^2.1.0", "es-iterator-helpers": "^1.2.1", "estraverse": "^5.3.0", "hasown": "^2.0.2", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", "object.entries": "^1.1.9", "object.fromentries": "^2.0.8", "object.values": "^1.2.1", "prop-types": "^15.8.1", "resolve": "^2.0.0-next.5", "semver": "^6.3.1", "string.prototype.matchall": "^4.0.12", "string.prototype.repeat": "^1.0.0" }, "peerDependencies": { "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA=="], @@ -1658,7 +1655,7 @@ "get-symbol-description": ["get-symbol-description@1.1.0", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6" } }, "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg=="], - "get-tsconfig": ["get-tsconfig@4.12.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-LScr2aNr2FbjAjZh2C6X6BxRx1/x+aTDExct/xyq2XKbYOiG5c0aK7pMsSuyc0brz3ibr/lbQiHD9jzt4lccJw=="], + "get-tsconfig": ["get-tsconfig@4.13.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ=="], "glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, 
"bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], @@ -1876,7 +1873,7 @@ "isbinaryfile": ["isbinaryfile@5.0.6", "", {}, "sha512-I+NmIfBHUl+r2wcDd6JwE9yWje/PIVY/R5/CmV8dXLZd5K+L9X2klAOwfAHNnondLXkbHyTAleQAWonpTJBTtw=="], - "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], "istanbul-lib-coverage": ["istanbul-lib-coverage@3.2.2", "", {}, "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg=="], @@ -2106,7 +2103,7 @@ "markdown-table": ["markdown-table@3.0.4", "", {}, "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw=="], - "marked": ["marked@16.4.0", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-CTPAcRBq57cn3R8n3hwc2REddc28hjR7RzDXQ+lXLmMJYqn20BaI2cGw6QjgZGIgVfp2Wdfw4aMzgNteQ6qJgQ=="], + "marked": ["marked@16.4.1", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-ntROs7RaN3EvWfy3EZi14H4YxmT6A5YvywfhO+0pm+cH/dnSQRmdAmoFIc3B9aiwTehyk7pESH4ofyBY+V5hZg=="], "matcher": ["matcher@3.0.0", "", { "dependencies": { "escape-string-regexp": "^4.0.0" } }, "sha512-OkeDaAZ/bQCxeFAozM55PKcKU0yJMPGifLwV4Qgjitu+5MoAfSQN4lsLJeXZ1b8w0x+/Emda6MZgXS1jvsapng=="], @@ -2264,7 +2261,7 @@ "node-preload": ["node-preload@0.2.1", "", { "dependencies": { "process-on-spawn": "^1.0.0" } }, "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ=="], - "node-releases": ["node-releases@2.0.23", "", {}, "sha512-cCmFDMSm26S6tQSDpBCg/NR8NENrVPhAJSf+XbxBG4rPFaaonlEoE9wHQmun+cls499TQGSb7ZyPBRlzgKfpeg=="], + "node-releases": ["node-releases@2.0.26", "", {}, "sha512-S2M9YimhSjBSvYnlr5/+umAnPHE++ODwt5e2Ij6FoX45HA/s4vHdkDx1eax2pAPeAOqu4s9b7ppahsyEFdVqQA=="], 
"normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="], @@ -2322,7 +2319,7 @@ "package-json-from-dist": ["package-json-from-dist@1.0.1", "", {}, "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="], - "package-manager-detector": ["package-manager-detector@1.4.1", "", {}, "sha512-dSMiVLBEA4XaNJ0PRb4N5cV/SEP4BWrWZKBmfF+OUm2pQTiZ6DDkKeWaltwu3JRhLoy59ayIkJ00cx9K9CaYTg=="], + "package-manager-detector": ["package-manager-detector@1.5.0", "", {}, "sha512-uBj69dVlYe/+wxj8JOpr97XfsxH/eumMt6HqjNTmJDf/6NO9s+0uxeOneIz3AsPt2m6y9PqzDzd3ATcU17MNfw=="], "pako": ["pako@1.0.11", "", {}, "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="], @@ -2370,9 +2367,9 @@ "pkg-types": ["pkg-types@2.3.0", "", { "dependencies": { "confbox": "^0.2.2", "exsolve": "^1.0.7", "pathe": "^2.0.3" } }, "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig=="], - "playwright": ["playwright@1.56.0", "", { "dependencies": { "playwright-core": "1.56.0" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": { "playwright": "cli.js" } }, "sha512-X5Q1b8lOdWIE4KAoHpW3SE8HvUB+ZZsUoN64ZhjnN8dOb1UpujxBtENGiZFE+9F/yhzJwYa+ca3u43FeLbboHA=="], + "playwright": ["playwright@1.56.1", "", { "dependencies": { "playwright-core": "1.56.1" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": { "playwright": "cli.js" } }, "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw=="], - "playwright-core": ["playwright-core@1.56.0", "", { "bin": { "playwright-core": "cli.js" } }, "sha512-1SXl7pMfemAMSDn5rkPeZljxOCYAmQnYLBTExuh6E8USHXGSX3dx6lYZN/xPpTz1vimXmPA9CDnILvmJaB8aSQ=="], + "playwright-core": ["playwright-core@1.56.1", "", { "bin": { "playwright-core": "cli.js" } }, 
"sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ=="], "plimit-lit": ["plimit-lit@1.6.1", "", { "dependencies": { "queue-lit": "^1.5.1" } }, "sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA=="], @@ -2390,7 +2387,7 @@ "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="], - "posthog-js": ["posthog-js@1.276.0", "", { "dependencies": { "@posthog/core": "1.3.0", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", "web-vitals": "^4.2.4" }, "peerDependencies": { "@rrweb/types": "2.0.0-alpha.17", "rrweb-snapshot": "2.0.0-alpha.17" }, "optionalPeers": ["@rrweb/types", "rrweb-snapshot"] }, "sha512-FYZE1037LrAoKKeUU0pUL7u8WwNK2BVeg5TFApwquVPUdj9h7u5Z077A313hPN19Ar+7Y+VHxqYqdHc4VNsVgw=="], + "posthog-js": ["posthog-js@1.279.3", "", { "dependencies": { "@posthog/core": "1.3.1", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", "web-vitals": "^4.2.4" } }, "sha512-09+hUgwY4W/+yTHk2mbxNiuu6NBCFzgaAcYkio1zphKZYcoQIehHOQsS1C8MHoyl3o8diZ98gAl2VJ6rS4GHaQ=="], "preact": ["preact@10.27.2", "", {}, "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg=="], @@ -2514,7 +2511,7 @@ "require-main-filename": ["require-main-filename@2.0.0", "", {}, "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="], - "resolve": ["resolve@1.22.10", "", { "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w=="], + "resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, 
"sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="], "resolve-alpn": ["resolve-alpn@1.2.1", "", {}, "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g=="], @@ -2596,6 +2593,8 @@ "shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="], + "shescape": ["shescape@2.1.6", "", { "dependencies": { "which": "^3.0.0 || ^4.0.0 || ^5.0.0" } }, "sha512-c9Ns1I+Tl0TC+cpsOT1FeZcvFalfd0WfHeD/CMccJH20xwochmJzq6AqtenndlyAw/BUi3BMcv92dYLVrqX+dw=="], + "shiki": ["shiki@3.13.0", "", { "dependencies": { "@shikijs/core": "3.13.0", "@shikijs/engine-javascript": "3.13.0", "@shikijs/engine-oniguruma": "3.13.0", "@shikijs/langs": "3.13.0", "@shikijs/themes": "3.13.0", "@shikijs/types": "3.13.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-aZW4l8Og16CokuCLf8CF8kq+KK2yOygapU5m3+hoGw0Mdosc6fPitjM+ujYarppj5ZIKGyPDPP1vqmQhr+5/0g=="], "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], @@ -2702,7 +2701,7 @@ "tailwind-merge": ["tailwind-merge@3.3.1", "", {}, "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g=="], - "tailwindcss": ["tailwindcss@4.1.15", "", {}, "sha512-k2WLnWkYFkdpRv+Oby3EBXIyQC8/s1HOFMBUViwtAh6Z5uAozeUSMQlIsn/c6Q2iJzqG6aJT3wdPaRNj70iYxQ=="], + "tailwindcss": ["tailwindcss@4.1.16", "", {}, "sha512-pONL5awpaQX4LN5eiv7moSiSPd/DLDzKVRJz8Q9PgzmAdd1R4307GQS2ZpfiN7ZmekdQrfhZZiSE5jkLR4WNaA=="], "tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="], @@ -2776,7 +2775,7 @@ "typescript": 
["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], - "typescript-eslint": ["typescript-eslint@8.46.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.46.1", "@typescript-eslint/parser": "8.46.1", "@typescript-eslint/typescript-estree": "8.46.1", "@typescript-eslint/utils": "8.46.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-VHgijW803JafdSsDO8I761r3SHrgk4T00IdyQ+/UsthtgPRsBWQLqoSxOolxTpxRKi1kGXK0bSz4CoAc9ObqJA=="], + "typescript-eslint": ["typescript-eslint@8.46.2", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.46.2", "@typescript-eslint/parser": "8.46.2", "@typescript-eslint/typescript-estree": "8.46.2", "@typescript-eslint/utils": "8.46.2" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-vbw8bOmiuYNdzzV3lsiWv6sRwjyuKJMQqWulBOU7M0RrxedXledX8G8kBbQeiOYDnTfiXz0Y4081E1QMNB6iQg=="], "uc.micro": ["uc.micro@2.1.0", "", {}, "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A=="], @@ -2788,13 +2787,13 @@ "undici": ["undici@7.16.0", "", {}, "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g=="], - "undici-types": ["undici-types@7.14.0", "", {}, "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA=="], + "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], "unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA=="], "unist-util-find-after": 
["unist-util-find-after@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ=="], - "unist-util-is": ["unist-util-is@6.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw=="], + "unist-util-is": ["unist-util-is@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g=="], "unist-util-position": ["unist-util-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA=="], @@ -2804,7 +2803,7 @@ "unist-util-visit": ["unist-util-visit@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg=="], - "unist-util-visit-parents": ["unist-util-visit-parents@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw=="], + "unist-util-visit-parents": ["unist-util-visit-parents@6.0.2", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ=="], "universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="], @@ -2816,7 +2815,7 @@ "unzip-crx-3": ["unzip-crx-3@0.2.0", "", { "dependencies": { "jszip": "^3.1.0", "mkdirp": "^0.5.1", "yaku": "^0.16.6" } }, "sha512-0+JiUq/z7faJ6oifVB5nSwt589v1KCduqIJupNVDoWSXZtWDmjDGO3RAEOvwJ07w90aoXoP4enKsR7ecMrJtWQ=="], - 
"update-browserslist-db": ["update-browserslist-db@1.1.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw=="], + "update-browserslist-db": ["update-browserslist-db@1.1.4", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A=="], "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], @@ -2844,7 +2843,7 @@ "vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="], - "vite": ["vite@7.1.11", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg=="], + "vite": ["vite@7.1.12", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", 
"postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug=="], "vite-plugin-svgr": ["vite-plugin-svgr@4.5.0", "", { "dependencies": { "@rollup/pluginutils": "^5.2.0", "@svgr/core": "^8.1.0", "@svgr/plugin-jsx": "^8.1.0" }, "peerDependencies": { "vite": ">=2.6.0" } }, "sha512-W+uoSpmVkSmNOGPSsDCWVW/DDAyv+9fap9AZXBvWiQqrboJ08j2vh0tFxTD/LjwqwAd3yYSVJgm54S/1GhbdnA=="], @@ -2874,7 +2873,7 @@ "webpack-virtual-modules": ["webpack-virtual-modules@0.6.2", "", {}, "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ=="], - "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], "which-boxed-primitive": ["which-boxed-primitive@1.1.1", "", { "dependencies": { "is-bigint": "^1.1.0", "is-boolean-object": "^1.2.1", "is-number-object": "^1.1.1", "is-string": "^1.1.1", "is-symbol": "^1.1.1" } }, "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA=="], @@ -2910,8 +2909,6 @@ "yallist": ["yallist@4.0.0", "", {}, 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - "yaml": ["yaml@1.10.2", "", {}, "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="], - "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="], @@ -3042,15 +3039,13 @@ "@storybook/addon-actions/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], - "@storybook/core/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="], - "@storybook/core/semver": ["semver@7.7.3", "", { "bin": { "semver": 
"bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], "@storybook/test-runner/jest": ["jest@29.7.0", "", { "dependencies": { "@jest/core": "^29.7.0", "@jest/types": "^29.6.3", "import-local": "^3.0.2", "jest-cli": "^29.7.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"], "bin": { "jest": "bin/jest.js" } }, "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw=="], - "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.5.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg=="], + "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.6.0", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-zq/ay+9fNIJJtJiZxdTnXS20PllcYMX3OE23ESc4HK/bdYu3cOWYVhsOhVnXALfU/uqJIxn5NBPd9z4v+SfoSg=="], - "@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.5.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ=="], + "@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.6.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA=="], "@tailwindcss/oxide-wasm32-wasi/@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.1.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ=="], @@ -3092,8 +3087,6 @@ "babel-plugin-istanbul/istanbul-lib-instrument": ["istanbul-lib-instrument@5.2.1", "", { "dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", 
"@istanbuljs/schema": "^0.1.2", "istanbul-lib-coverage": "^3.2.0", "semver": "^6.3.0" } }, "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg=="], - "babel-plugin-macros/cosmiconfig": ["cosmiconfig@7.1.0", "", { "dependencies": { "@types/parse-json": "^4.0.0", "import-fresh": "^3.2.1", "parse-json": "^5.0.0", "path-type": "^4.0.0", "yaml": "^1.10.0" } }, "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA=="], - "bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], "builder-util/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -3116,6 +3109,8 @@ "create-jest/jest-config": ["jest-config@29.7.0", "", { "dependencies": { "@babel/core": "^7.11.6", "@jest/test-sequencer": "^29.7.0", "@jest/types": "^29.6.3", "babel-jest": "^29.7.0", "chalk": "^4.0.0", "ci-info": "^3.2.0", "deepmerge": "^4.2.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "jest-circus": "^29.7.0", "jest-environment-node": "^29.7.0", "jest-get-type": "^29.6.3", "jest-regex-util": "^29.6.3", "jest-resolve": "^29.7.0", "jest-runner": "^29.7.0", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "micromatch": "^4.0.4", "parse-json": "^5.2.0", "pretty-format": "^29.7.0", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" }, "peerDependencies": { "@types/node": "*", "ts-node": ">=9.0.0" }, "optionalPeers": ["@types/node", "ts-node"] }, "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ=="], + "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "cytoscape-fcose/cose-base": ["cose-base@2.2.0", "", { "dependencies": { "layout-base": "^2.0.0" } }, "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g=="], "d3-dsv/commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="], @@ -3130,7 +3125,7 @@ "dom-serializer/entities": ["entities@2.2.0", "", {}, "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A=="], - "electron/@types/node": ["@types/node@22.18.10", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-anNG/V/Efn/YZY4pRzbACnKxNKoBng2VTFydVu8RRs5hQjikP8CQfaeAV59VFSCzKNp90mXiVXW2QzV56rwMrg=="], + "electron/@types/node": ["@types/node@22.18.12", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-BICHQ67iqxQGFSzfCFTT7MRQ5XcBjG5aeKh5Ok38UBbPe5fxTyE+aHFxwVrGyr8GNlqFMLKD1D3P2K/1ks8tog=="], "electron-builder/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -3378,6 +3373,8 @@ "spawn-wrap/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + "spawn-wrap/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "spawnd/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], "stack-utils/escape-string-regexp": ["escape-string-regexp@2.0.0", "", {}, "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w=="], 
@@ -3538,50 +3535,6 @@ "@jest/types/chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - "@storybook/core/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.18.20", "", { "os": "android", "cpu": "arm" }, "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw=="], - - "@storybook/core/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.18.20", "", { "os": "android", "cpu": "arm64" }, "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ=="], - - "@storybook/core/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.18.20", "", { "os": "android", "cpu": "x64" }, "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg=="], - - "@storybook/core/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.18.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA=="], - - "@storybook/core/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.18.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ=="], - - "@storybook/core/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.18.20", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw=="], - - "@storybook/core/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.18.20", "", { "os": "freebsd", "cpu": "x64" }, "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ=="], - - "@storybook/core/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.18.20", "", { "os": "linux", "cpu": "arm" }, 
"sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg=="], - - "@storybook/core/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.18.20", "", { "os": "linux", "cpu": "arm64" }, "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA=="], - - "@storybook/core/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.18.20", "", { "os": "linux", "cpu": "ia32" }, "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA=="], - - "@storybook/core/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg=="], - - "@storybook/core/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ=="], - - "@storybook/core/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.18.20", "", { "os": "linux", "cpu": "ppc64" }, "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA=="], - - "@storybook/core/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A=="], - - "@storybook/core/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.18.20", "", { "os": "linux", "cpu": "s390x" }, "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ=="], - - "@storybook/core/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.18.20", "", { "os": "linux", "cpu": "x64" }, "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w=="], - - "@storybook/core/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.18.20", "", { "os": "none", 
"cpu": "x64" }, "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A=="], - - "@storybook/core/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.18.20", "", { "os": "openbsd", "cpu": "x64" }, "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg=="], - - "@storybook/core/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.18.20", "", { "os": "sunos", "cpu": "x64" }, "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ=="], - - "@storybook/core/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.18.20", "", { "os": "win32", "cpu": "arm64" }, "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg=="], - - "@storybook/core/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.18.20", "", { "os": "win32", "cpu": "ia32" }, "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g=="], - - "@storybook/core/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.18.20", "", { "os": "win32", "cpu": "x64" }, "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ=="], - "@storybook/test-runner/jest/@jest/core": ["@jest/core@29.7.0", "", { "dependencies": { "@jest/console": "^29.7.0", "@jest/reporters": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/transform": "^29.7.0", "@jest/types": "^29.6.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "ci-info": "^3.2.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^29.7.0", "jest-config": "^29.7.0", "jest-haste-map": "^29.7.0", "jest-message-util": "^29.7.0", "jest-regex-util": "^29.6.3", "jest-resolve": "^29.7.0", "jest-resolve-dependencies": "^29.7.0", "jest-runner": "^29.7.0", "jest-runtime": "^29.7.0", "jest-snapshot": "^29.7.0", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "jest-watcher": "^29.7.0", "micromatch": "^4.0.4", 
"pretty-format": "^29.7.0", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"] }, "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg=="], "@storybook/test-runner/jest/jest-cli": ["jest-cli@29.7.0", "", { "dependencies": { "@jest/core": "^29.7.0", "@jest/test-result": "^29.7.0", "@jest/types": "^29.6.3", "chalk": "^4.0.0", "create-jest": "^29.7.0", "exit": "^0.1.2", "import-local": "^3.0.2", "jest-config": "^29.7.0", "jest-util": "^29.7.0", "jest-validate": "^29.7.0", "yargs": "^17.3.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"], "bin": { "jest": "bin/jest.js" } }, "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg=="], @@ -3644,6 +3597,8 @@ "create-jest/jest-config/pretty-format": ["pretty-format@29.7.0", "", { "dependencies": { "@jest/schemas": "^29.6.3", "ansi-styles": "^5.0.0", "react-is": "^18.0.0" } }, "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ=="], + "cross-spawn/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "cytoscape-fcose/cose-base/layout-base": ["layout-base@2.0.1", "", {}, "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg=="], "d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="], @@ -3690,6 +3645,8 @@ "glob/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], + "global-prefix/which/isexe": ["isexe@2.0.0", "", {}, 
"sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "istanbul-lib-report/make-dir/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], "jest-changed-files/jest-util/@jest/types": ["@jest/types@30.2.0", "", { "dependencies": { "@jest/pattern": "30.0.1", "@jest/schemas": "30.0.5", "@types/istanbul-lib-coverage": "^2.0.6", "@types/istanbul-reports": "^3.0.4", "@types/node": "*", "@types/yargs": "^17.0.33", "chalk": "^4.1.2" } }, "sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg=="], @@ -3860,6 +3817,8 @@ "readdir-glob/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], + "spawn-wrap/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "string-length/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], "wait-port/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="], diff --git a/docs/AGENTS.md b/docs/AGENTS.md index 55d8db0cd2..2d42422557 100644 --- a/docs/AGENTS.md +++ b/docs/AGENTS.md @@ -110,7 +110,7 @@ Verify with React DevTools Profiler - MarkdownCore should only re-render when co ## Documentation Guidelines -**Free-floating markdown docs are not permitted.** Documentation must be organized: +**Free-floating markdown docs are not permitted.** Documentation must be organized. 
Do not create standalone markdown files in the project root or random locations, even for implementation summaries or planning documents - use the propose_plan tool or inline comments instead. - **User-facing docs** → `./docs/` directory - **IMPORTANT**: Read `docs/README.md` first before writing user-facing documentation @@ -119,6 +119,7 @@ Verify with React DevTools Profiler - MarkdownCore should only re-render when co - Use standard markdown + mermaid diagrams - **Developer docs** → inline with the code its documenting as comments. Consider them notes as notes to future Assistants to understand the logic more quickly. **DO NOT** create standalone documentation files in the project root or random locations. +- **Test documentation** → inline comments in test files explaining complex test setup or edge cases, NOT separate README files. **NEVER create markdown documentation files (README, guides, summaries, etc.) in the project root during feature development unless the user explicitly requests documentation.** Code + tests + inline comments are complete documentation. @@ -204,6 +205,7 @@ This project uses **Make** as the primary build orchestrator. See `Makefile` for - **Integration tests:** - Run specific integration test: `TEST_INTEGRATION=1 bun x jest tests/ipcMain/sendMessage.test.ts -t "test name pattern"` - Run all integration tests: `TEST_INTEGRATION=1 bun x jest tests` (~35 seconds, runs 40 tests) + - **⚠️ Running `tests/ipcMain` locally takes a very long time.** Prefer running specific test files or use `-t` to filter to specific tests. - **Performance**: Tests use `test.concurrent()` to run in parallel within each file - **NEVER bypass IPC in integration tests** - Integration tests must use the real IPC communication paths (e.g., `mockIpcRenderer.invoke()`) even when it's harder. Directly accessing services (HistoryService, PartialService, etc.) or manipulating config/state directly bypasses the integration layer and defeats the purpose of the test. 
diff --git a/package.json b/package.json index 64a4c210ad..06988e12cf 100644 --- a/package.json +++ b/package.json @@ -41,7 +41,8 @@ "docs:watch": "make docs-watch", "storybook": "make storybook", "storybook:build": "make storybook-build", - "test:storybook": "make test-storybook" + "test:storybook": "make test-storybook", + "rebuild": "echo \"No native modules to rebuild\"" }, "dependencies": { "@ai-sdk/anthropic": "^2.0.29", @@ -69,6 +70,7 @@ "markdown-it": "^14.1.0", "minimist": "^1.2.8", "rehype-harden": "^1.1.5", + "shescape": "^2.1.6", "source-map-support": "^0.5.21", "streamdown": "^1.4.0", "undici": "^7.16.0", diff --git a/scripts/wait_pr_checks.sh b/scripts/wait_pr_checks.sh index a3a99c1c64..8e74ac9833 100755 --- a/scripts/wait_pr_checks.sh +++ b/scripts/wait_pr_checks.sh @@ -123,6 +123,10 @@ while true; do echo "💡 To extract detailed logs from the failed run:" echo " ./scripts/extract_pr_logs.sh $PR_NUMBER" echo " ./scripts/extract_pr_logs.sh $PR_NUMBER # e.g., Integration" + echo "" + echo "💡 To re-run a subset of integration tests faster with workflow_dispatch:" + echo " gh workflow run ci.yml --ref $(git rev-parse --abbrev-ref HEAD) -f test_filter=\"tests/ipcMain/specificTest.test.ts\"" + echo " gh workflow run ci.yml --ref $(git rev-parse --abbrev-ref HEAD) -f test_filter=\"-t 'specific test name'\"" exit 1 fi diff --git a/src/App.tsx b/src/App.tsx index 98918d7a14..7092fa9fc7 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -22,11 +22,13 @@ import { CommandPalette } from "./components/CommandPalette"; import { buildCoreSources, type BuildSourcesParams } from "./utils/commands/sources"; import type { ThinkingLevel } from "./types/thinking"; +import type { RuntimeConfig } from "./types/runtime"; import { CUSTOM_EVENTS } from "./constants/events"; import { isWorkspaceForkSwitchEvent } from "./utils/workspaceFork"; import { getThinkingLevelKey } from "./constants/storage"; import type { BranchListResult } from "./types/ipc"; import { useTelemetry } from 
"./hooks/useTelemetry"; +import { parseRuntimeString } from "./utils/chatCommands"; const THINKING_LEVELS: ThinkingLevel[] = ["off", "low", "medium", "high"]; @@ -233,7 +235,11 @@ function AppInner() { [handleRemoveProject] ); - const handleCreateWorkspace = async (branchName: string, trunkBranch: string) => { + const handleCreateWorkspace = async ( + branchName: string, + trunkBranch: string, + runtime?: string + ) => { if (!workspaceModalProject) return; console.assert( @@ -241,7 +247,23 @@ function AppInner() { "Expected trunk branch to be provided by the workspace modal" ); - const newWorkspace = await createWorkspace(workspaceModalProject, branchName, trunkBranch); + // Parse runtime config if provided + let runtimeConfig: RuntimeConfig | undefined; + if (runtime) { + try { + runtimeConfig = parseRuntimeString(runtime, branchName); + } catch (err) { + console.error("Failed to parse runtime config:", err); + throw err; // Let modal handle the error + } + } + + const newWorkspace = await createWorkspace( + workspaceModalProject, + branchName, + trunkBranch, + runtimeConfig + ); if (newWorkspace) { // Track workspace creation telemetry.workspaceCreated(newWorkspace.workspaceId); @@ -406,21 +428,6 @@ function AppInner() { [handleAddWorkspace] ); - const createWorkspaceFromPalette = useCallback( - async (projectPath: string, branchName: string, trunkBranch: string) => { - console.assert( - typeof trunkBranch === "string" && trunkBranch.trim().length > 0, - "Expected trunk branch to be provided by the command palette" - ); - const newWs = await createWorkspace(projectPath, branchName, trunkBranch); - if (newWs) { - telemetry.workspaceCreated(newWs.workspaceId); - setSelectedWorkspace(newWs); - } - }, - [createWorkspace, setSelectedWorkspace, telemetry] - ); - const getBranchesForProject = useCallback( async (projectPath: string): Promise => { const branchResult = await window.api.projects.listBranches(projectPath); @@ -488,7 +495,6 @@ function AppInner() { 
getThinkingLevel: getThinkingLevelForWorkspace, onSetThinkingLevel: setThinkingLevelFromPalette, onOpenNewWorkspaceModal: openNewWorkspaceFromPalette, - onCreateWorkspace: createWorkspaceFromPalette, getBranchesForProject, onSelectWorkspace: selectWorkspaceFromPalette, onRemoveWorkspace: removeWorkspaceFromPalette, diff --git a/src/components/NewWorkspaceModal.tsx b/src/components/NewWorkspaceModal.tsx index 893a37573b..9bccd4fd8c 100644 --- a/src/components/NewWorkspaceModal.tsx +++ b/src/components/NewWorkspaceModal.tsx @@ -10,9 +10,13 @@ interface NewWorkspaceModalProps { defaultTrunkBranch?: string; loadErrorMessage?: string | null; onClose: () => void; - onAdd: (branchName: string, trunkBranch: string) => Promise; + onAdd: (branchName: string, trunkBranch: string, runtime?: string) => Promise; } +// Shared form field styles +const formFieldClasses = + "[&_label]:text-foreground [&_input]:bg-modal-bg [&_input]:border-border-medium [&_input]:focus:border-accent [&_select]:bg-modal-bg [&_select]:border-border-medium [&_select]:focus:border-accent [&_option]:bg-modal-bg mb-5 [&_input]:w-full [&_input]:rounded [&_input]:border [&_input]:px-3 [&_input]:py-2 [&_input]:text-sm [&_input]:text-white [&_input]:focus:outline-none [&_input]:disabled:cursor-not-allowed [&_input]:disabled:opacity-60 [&_label]:mb-2 [&_label]:block [&_label]:text-sm [&_option]:text-white [&_select]:w-full [&_select]:cursor-pointer [&_select]:rounded [&_select]:border [&_select]:px-3 [&_select]:py-2 [&_select]:text-sm [&_select]:text-white [&_select]:focus:outline-none [&_select]:disabled:cursor-not-allowed [&_select]:disabled:opacity-60"; + const NewWorkspaceModal: React.FC = ({ isOpen, projectName, @@ -24,6 +28,8 @@ const NewWorkspaceModal: React.FC = ({ }) => { const [branchName, setBranchName] = useState(""); const [trunkBranch, setTrunkBranch] = useState(defaultTrunkBranch ?? branches[0] ?? 
""); + const [runtimeMode, setRuntimeMode] = useState<"local" | "ssh">("local"); + const [sshHost, setSshHost] = useState(""); const [isLoading, setIsLoading] = useState(false); const [error, setError] = useState(null); const infoId = useId(); @@ -53,6 +59,8 @@ const NewWorkspaceModal: React.FC = ({ const handleCancel = () => { setBranchName(""); setTrunkBranch(defaultTrunkBranch ?? branches[0] ?? ""); + setRuntimeMode("local"); + setSshHost(""); setError(loadErrorMessage ?? null); onClose(); }; @@ -74,13 +82,29 @@ const NewWorkspaceModal: React.FC = ({ console.assert(normalizedTrunkBranch.length > 0, "Expected trunk branch name to be validated"); console.assert(trimmedBranchName.length > 0, "Expected branch name to be validated"); + // Validate SSH host if SSH runtime selected + if (runtimeMode === "ssh") { + const trimmedHost = sshHost.trim(); + if (trimmedHost.length === 0) { + setError("SSH host is required (e.g., hostname or user@host)"); + return; + } + // Accept both "hostname" and "user@hostname" formats + // SSH will use current user or ~/.ssh/config if user not specified + } + setIsLoading(true); setError(null); try { - await onAdd(trimmedBranchName, normalizedTrunkBranch); + // Build runtime string if SSH selected + const runtime = runtimeMode === "ssh" ? `ssh ${sshHost.trim()}` : undefined; + + await onAdd(trimmedBranchName, normalizedTrunkBranch, runtime); setBranchName(""); setTrunkBranch(defaultTrunkBranch ?? branches[0] ?? ""); + setRuntimeMode("local"); + setSshHost(""); onClose(); } catch (err) { const message = err instanceof Error ? err.message : "Failed to create workspace"; @@ -100,7 +124,7 @@ const NewWorkspaceModal: React.FC = ({ describedById={infoId} >
void handleSubmit(event)}> -
+
-
+
{hasBranches ? ( { + setRuntimeMode(event.target.value as "local" | "ssh"); + setError(null); + }} + disabled={isLoading} + > + + + +
+ + {runtimeMode === "ssh" && ( +
+ + { + setSshHost(event.target.value); + setError(null); + }} + placeholder="hostname or user@hostname" + disabled={isLoading} + required + aria-required="true" + /> +
+ Workspace will be created at ~/cmux/{branchName || ""} on remote host +
+
+ )} +

This will create a git worktree at:

- ~/.cmux/src/{projectName}/{branchName || ""} + {runtimeMode === "ssh" + ? `${sshHost || ""}:~/cmux/${branchName || ""}` + : `~/.cmux/src/${projectName}/${branchName || ""}`}
@@ -184,7 +248,11 @@ const NewWorkspaceModal: React.FC = ({
Equivalent command:
- {formatNewCommand(branchName.trim(), trunkBranch.trim() || undefined)} + {formatNewCommand( + branchName.trim(), + trunkBranch.trim() || undefined, + runtimeMode === "ssh" && sshHost.trim() ? `ssh ${sshHost.trim()}` : undefined + )}
)} diff --git a/src/config.ts b/src/config.ts index 2a8ab46f1e..91512ca9b7 100644 --- a/src/config.ts +++ b/src/config.ts @@ -129,24 +129,6 @@ export class Config { * Get the workspace worktree path for a given directory name. * The directory name is the workspace name (branch name). */ - getWorkspacePath(projectPath: string, directoryName: string): string { - const projectName = this.getProjectName(projectPath); - return path.join(this.srcDir, projectName, directoryName); - } - - /** - * Compute workspace path from metadata. - * Directory uses workspace name (e.g., ~/.cmux/src/project/workspace-name). - */ - getWorkspacePaths(metadata: WorkspaceMetadata): { - /** Worktree path (uses workspace name as directory) */ - namedWorkspacePath: string; - } { - const path = this.getWorkspacePath(metadata.projectPath, metadata.name); - return { - namedWorkspacePath: path, - }; - } /** * Add paths to WorkspaceMetadata to create FrontendWorkspaceMetadata. @@ -274,6 +256,8 @@ export class Config { projectPath, // GUARANTEE: All workspaces must have createdAt (assign now if missing) createdAt: workspace.createdAt ?? 
new Date().toISOString(), + // Include runtime config if present (for SSH workspaces) + runtimeConfig: workspace.runtimeConfig, }; // Migrate missing createdAt to config for next load @@ -383,7 +367,10 @@ export class Config { // Check if workspace already exists (by ID) const existingIndex = project.workspaces.findIndex((w) => w.id === metadata.id); - const workspacePath = this.getWorkspacePath(projectPath, metadata.name); + // Compute workspace path - this is only for legacy config migration + // New code should use Runtime.getWorkspacePath() directly + const projectName = this.getProjectName(projectPath); + const workspacePath = path.join(this.srcDir, projectName, metadata.name); const workspaceEntry: Workspace = { path: workspacePath, id: metadata.id, diff --git a/src/constants/env.ts b/src/constants/env.ts new file mode 100644 index 0000000000..d443c60fcb --- /dev/null +++ b/src/constants/env.ts @@ -0,0 +1,14 @@ +/** + * Standard environment variables for non-interactive command execution. + * These prevent tools from blocking on editor/credential prompts. + */ +export const NON_INTERACTIVE_ENV_VARS = { + // Prevent interactive editors from blocking execution + // Critical for git operations like rebase/commit that try to open editors + GIT_EDITOR: "true", // Git-specific editor (highest priority) + GIT_SEQUENCE_EDITOR: "true", // For interactive rebase sequences + EDITOR: "true", // General fallback for non-git commands + VISUAL: "true", // Another common editor environment variable + // Prevent git from prompting for credentials + GIT_TERMINAL_PROMPT: "0", // Disables git credential prompts +} as const; diff --git a/src/constants/exitCodes.ts b/src/constants/exitCodes.ts new file mode 100644 index 0000000000..619cee68c7 --- /dev/null +++ b/src/constants/exitCodes.ts @@ -0,0 +1,15 @@ +/** + * Special exit codes used by Runtime implementations to communicate + * expected error conditions (timeout, abort) without throwing exceptions. 
+ * + * These are distinct from standard Unix exit codes and signals: + * - Normal exit: 0-255 + * - Signal death: typically -1 to -64 (negative signal numbers) + * - Special runtime codes: -997, -998 (far outside normal range) + */ + +/** Process was aborted via AbortSignal */ +export const EXIT_CODE_ABORTED = -997; + +/** Process exceeded configured timeout */ +export const EXIT_CODE_TIMEOUT = -998; diff --git a/src/contexts/AppContext.tsx b/src/contexts/AppContext.tsx index 4a7bbbcb3a..5663dea1f8 100644 --- a/src/contexts/AppContext.tsx +++ b/src/contexts/AppContext.tsx @@ -3,6 +3,7 @@ import { createContext, useContext } from "react"; import type { ProjectConfig } from "@/config"; import type { FrontendWorkspaceMetadata } from "@/types/workspace"; import type { WorkspaceSelection } from "@/components/ProjectSidebar"; +import type { RuntimeConfig } from "@/types/runtime"; /** * App-level state and operations shared across the component tree. @@ -21,7 +22,8 @@ interface AppContextType { createWorkspace: ( projectPath: string, branchName: string, - trunkBranch: string + trunkBranch: string, + runtimeConfig?: RuntimeConfig ) => Promise<{ projectPath: string; projectName: string; diff --git a/src/git.ts b/src/git.ts index 03c7c705b8..617c33afcb 100644 --- a/src/git.ts +++ b/src/git.ts @@ -1,7 +1,9 @@ import * as fs from "fs"; import * as path from "path"; import type { Config } from "./config"; +import type { RuntimeConfig } from "./types/runtime"; import { execAsync } from "./utils/disposableExec"; +import { createRuntime } from "./runtime/runtimeFactory"; export interface WorktreeResult { success: boolean; @@ -13,6 +15,8 @@ export interface CreateWorktreeOptions { trunkBranch: string; /** Directory name to use for the worktree (if not provided, uses branchName) */ directoryName?: string; + /** Runtime configuration (needed to compute workspace path) */ + runtimeConfig?: RuntimeConfig; } export async function listLocalBranches(projectPath: string): Promise { @@ 
-78,7 +82,11 @@ export async function createWorktree( try { // Use directoryName if provided, otherwise fall back to branchName (legacy) const dirName = options.directoryName ?? branchName; - const workspacePath = config.getWorkspacePath(projectPath, dirName); + // Compute workspace path using Runtime (single source of truth) + const runtime = createRuntime( + options.runtimeConfig ?? { type: "local", srcBaseDir: config.srcDir } + ); + const workspacePath = runtime.getWorkspacePath(projectPath, dirName); const { trunkBranch } = options; const normalizedTrunkBranch = typeof trunkBranch === "string" ? trunkBranch.trim() : ""; diff --git a/src/hooks/useWorkspaceManagement.ts b/src/hooks/useWorkspaceManagement.ts index b0bd62a163..9918c90423 100644 --- a/src/hooks/useWorkspaceManagement.ts +++ b/src/hooks/useWorkspaceManagement.ts @@ -2,6 +2,7 @@ import { useState, useEffect, useCallback } from "react"; import type { FrontendWorkspaceMetadata } from "@/types/workspace"; import type { WorkspaceSelection } from "@/components/ProjectSidebar"; import type { ProjectConfig } from "@/config"; +import type { RuntimeConfig } from "@/types/runtime"; import { deleteWorkspaceStorage } from "@/constants/storage"; interface UseWorkspaceManagementProps { @@ -101,12 +102,22 @@ export function useWorkspaceManagement({ }; }, [onProjectsUpdate]); - const createWorkspace = async (projectPath: string, branchName: string, trunkBranch: string) => { + const createWorkspace = async ( + projectPath: string, + branchName: string, + trunkBranch: string, + runtimeConfig?: RuntimeConfig + ) => { console.assert( typeof trunkBranch === "string" && trunkBranch.trim().length > 0, "Expected trunk branch to be provided when creating a workspace" ); - const result = await window.api.workspace.create(projectPath, branchName, trunkBranch); + const result = await window.api.workspace.create( + projectPath, + branchName, + trunkBranch, + runtimeConfig + ); if (result.success) { // Backend has already updated 
the config - reload projects to get updated state const projectsList = await window.api.projects.list(); diff --git a/src/preload.ts b/src/preload.ts index 7fc5d49e5a..dfb2ad6b74 100644 --- a/src/preload.ts +++ b/src/preload.ts @@ -49,8 +49,14 @@ const api: IPCApi = { }, workspace: { list: () => ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_LIST), - create: (projectPath, branchName, trunkBranch: string) => - ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_CREATE, projectPath, branchName, trunkBranch), + create: (projectPath, branchName, trunkBranch: string, runtimeConfig?) => + ipcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_CREATE, + projectPath, + branchName, + trunkBranch, + runtimeConfig + ), remove: (workspaceId: string, options?: { force?: boolean }) => ipcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, workspaceId, options), rename: (workspaceId: string, newName: string) => diff --git a/src/runtime/LocalRuntime.ts b/src/runtime/LocalRuntime.ts new file mode 100644 index 0000000000..4b01216e81 --- /dev/null +++ b/src/runtime/LocalRuntime.ts @@ -0,0 +1,517 @@ +import { spawn } from "child_process"; +import * as fs from "fs"; +import * as fsPromises from "fs/promises"; +import * as path from "path"; +import { Readable, Writable } from "stream"; +import type { + Runtime, + ExecOptions, + ExecStream, + FileStat, + WorkspaceCreationParams, + WorkspaceCreationResult, + WorkspaceInitParams, + WorkspaceInitResult, + InitLogger, +} from "./Runtime"; +import { RuntimeError as RuntimeErrorClass } from "./Runtime"; +import { NON_INTERACTIVE_ENV_VARS } from "../constants/env"; +import { EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT } from "../constants/exitCodes"; +import { listLocalBranches } from "../git"; +import { checkInitHookExists, getInitHookPath, createLineBufferedLoggers } from "./initHook"; +import { execAsync } from "../utils/disposableExec"; +import { findBashPath, findNicePath } from "./executablePaths"; +import { getProjectName } from "../utils/runtime/helpers"; +import { 
getErrorMessage } from "../utils/errors"; + +/** + * Local runtime implementation that executes commands and file operations + * directly on the host machine using Node.js APIs. + */ +export class LocalRuntime implements Runtime { + private readonly srcBaseDir: string; + + constructor(srcBaseDir: string) { + this.srcBaseDir = srcBaseDir; + } + + async exec(command: string, options: ExecOptions): Promise { + const startTime = performance.now(); + + // Use the specified working directory (must be a specific workspace path) + const cwd = options.cwd; + + // Check if working directory exists before spawning + // This prevents confusing ENOENT errors from spawn() + try { + await fsPromises.access(cwd); + } catch (err) { + throw new RuntimeErrorClass( + `Working directory does not exist: ${cwd}`, + "exec", + err instanceof Error ? err : undefined + ); + } + + // Find bash path (important for CI environments where PATH may not be set) + const bashPath = findBashPath(); + const nicePath = findNicePath(); + + // If niceness is specified, spawn nice directly to avoid escaping issues + const spawnCommand = options.niceness !== undefined ? nicePath : bashPath; + const spawnArgs = + options.niceness !== undefined + ? ["-n", options.niceness.toString(), bashPath, "-c", command] + : ["-c", command]; + + const childProcess = spawn(spawnCommand, spawnArgs, { + cwd, + env: { + ...process.env, + ...(options.env ?? {}), + ...NON_INTERACTIVE_ENV_VARS, + }, + stdio: ["pipe", "pipe", "pipe"], + // CRITICAL: Spawn as detached process group leader to enable cleanup of background processes. + // When a bash script spawns background processes (e.g., `sleep 100 &`), we need to kill + // the entire process group (including all backgrounded children) via process.kill(-pid). + // NOTE: detached:true does NOT cause bash to wait for background jobs when using 'exit' event + // instead of 'close' event. The 'exit' event fires when bash exits, ignoring background children. 
+ detached: true, + }); + + // Convert Node.js streams to Web Streams + const stdout = Readable.toWeb(childProcess.stdout) as unknown as ReadableStream; + const stderr = Readable.toWeb(childProcess.stderr) as unknown as ReadableStream; + const stdin = Writable.toWeb(childProcess.stdin) as unknown as WritableStream; + + // Track if we killed the process due to timeout + let timedOut = false; + + // Create promises for exit code and duration + // Uses special exit codes (EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT) for expected error conditions + const exitCode = new Promise((resolve, reject) => { + // Use 'exit' event instead of 'close' to handle background processes correctly. + // The 'close' event waits for ALL child processes (including background ones) to exit, + // which causes hangs when users spawn background processes like servers. + // The 'exit' event fires when the main bash process exits, which is what we want. + // + // However, stdio streams may not be fully flushed when 'exit' fires, so we need to: + // 1. Track when process exits and when streams close + // 2. Resolve immediately if streams have closed + // 3. Wait with a grace period (50ms) for streams to flush if they haven't closed yet + // 4. 
Force-close streams after grace period to prevent hangs + let stdoutClosed = false; + let stderrClosed = false; + let processExited = false; + let exitedCode: number | null = null; + + // Track stream closures + childProcess.stdout?.on("close", () => { + stdoutClosed = true; + tryResolve(); + }); + childProcess.stderr?.on("close", () => { + stderrClosed = true; + tryResolve(); + }); + + const tryResolve = () => { + // Only resolve if process has exited AND streams are closed + if (processExited && stdoutClosed && stderrClosed) { + finalizeExit(); + } + }; + + const finalizeExit = () => { + // Check abort first (highest priority) + if (options.abortSignal?.aborted) { + resolve(EXIT_CODE_ABORTED); + return; + } + // Check if we killed the process due to timeout + if (timedOut) { + resolve(EXIT_CODE_TIMEOUT); + return; + } + resolve(exitedCode ?? 0); + }; + + childProcess.on("exit", (code) => { + processExited = true; + exitedCode = code; + + // Clean up any background processes (process group cleanup) + // This prevents zombie processes when scripts spawn background tasks + if (childProcess.pid !== undefined) { + try { + // Kill entire process group with SIGKILL - cannot be caught/ignored + // Use negative PID to signal the entire process group + process.kill(-childProcess.pid, "SIGKILL"); + } catch { + // Process group already dead or doesn't exist - ignore + } + } + + // Try to resolve immediately if streams have already closed + tryResolve(); + + // Set a grace period timer - if streams don't close within 50ms, finalize anyway + // This handles background processes that keep stdio open + setTimeout(() => { + if (!stdoutClosed || !stderrClosed) { + // Mark streams as closed and finalize without destroying them + // Destroying converted Web Streams causes errors in the conversion layer + stdoutClosed = true; + stderrClosed = true; + finalizeExit(); + } + }, 50); + }); + + childProcess.on("error", (err) => { + reject(new RuntimeErrorClass(`Failed to execute command: 
${err.message}`, "exec", err)); + }); + }); + + const duration = exitCode.then(() => performance.now() - startTime); + + // Helper to kill entire process group (including background children) + const killProcessGroup = () => { + if (childProcess.pid === undefined) return; + + try { + // Kill entire process group with SIGKILL - cannot be caught/ignored + process.kill(-childProcess.pid, "SIGKILL"); + } catch { + // Fallback: try killing just the main process + try { + childProcess.kill("SIGKILL"); + } catch { + // Process already dead - ignore + } + } + }; + + // Handle abort signal + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", killProcessGroup); + } + + // Handle timeout + if (options.timeout !== undefined) { + setTimeout(() => { + timedOut = true; + killProcessGroup(); + }, options.timeout * 1000); + } + + return { stdout, stderr, stdin, exitCode, duration }; + } + + readFile(filePath: string): ReadableStream { + const nodeStream = fs.createReadStream(filePath); + + // Handle errors by wrapping in a transform + const webStream = Readable.toWeb(nodeStream) as unknown as ReadableStream; + + return new ReadableStream({ + async start(controller: ReadableStreamDefaultController) { + try { + const reader = webStream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) break; + controller.enqueue(value); + } + controller.close(); + } catch (err) { + controller.error( + new RuntimeErrorClass( + `Failed to read file ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + "file_io", + err instanceof Error ? 
err : undefined + ) + ); + } + }, + }); + } + + writeFile(filePath: string): WritableStream { + let tempPath: string; + let writer: WritableStreamDefaultWriter; + + return new WritableStream({ + async start() { + // Create parent directories if they don't exist + const parentDir = path.dirname(filePath); + await fsPromises.mkdir(parentDir, { recursive: true }); + + // Create temp file for atomic write + tempPath = `${filePath}.tmp.${Date.now()}`; + const nodeStream = fs.createWriteStream(tempPath); + const webStream = Writable.toWeb(nodeStream) as WritableStream; + writer = webStream.getWriter(); + }, + async write(chunk: Uint8Array) { + await writer.write(chunk); + }, + async close() { + // Close the writer and rename to final location + await writer.close(); + try { + await fsPromises.rename(tempPath, filePath); + } catch (err) { + throw new RuntimeErrorClass( + `Failed to write file ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + "file_io", + err instanceof Error ? err : undefined + ); + } + }, + async abort(reason?: unknown) { + // Clean up temp file on abort + await writer.abort(); + try { + await fsPromises.unlink(tempPath); + } catch { + // Ignore errors cleaning up temp file + } + throw new RuntimeErrorClass( + `Failed to write file ${filePath}: ${String(reason)}`, + "file_io" + ); + }, + }); + } + + async stat(filePath: string): Promise { + try { + const stats = await fsPromises.stat(filePath); + return { + size: stats.size, + modifiedTime: stats.mtime, + isDirectory: stats.isDirectory(), + }; + } catch (err) { + throw new RuntimeErrorClass( + `Failed to stat ${filePath}: ${err instanceof Error ? err.message : String(err)}`, + "file_io", + err instanceof Error ? 
err : undefined + ); + } + } + + getWorkspacePath(projectPath: string, workspaceName: string): string { + const projectName = getProjectName(projectPath); + return path.join(this.srcBaseDir, projectName, workspaceName); + } + + async createWorkspace(params: WorkspaceCreationParams): Promise { + const { projectPath, branchName, trunkBranch, initLogger } = params; + + try { + // Compute workspace path using the canonical method + const workspacePath = this.getWorkspacePath(projectPath, branchName); + initLogger.logStep("Creating git worktree..."); + + // Create parent directory if needed + const parentDir = path.dirname(workspacePath); + // eslint-disable-next-line local/no-sync-fs-methods + if (!fs.existsSync(parentDir)) { + // eslint-disable-next-line local/no-sync-fs-methods + fs.mkdirSync(parentDir, { recursive: true }); + } + + // Check if workspace already exists + // eslint-disable-next-line local/no-sync-fs-methods + if (fs.existsSync(workspacePath)) { + return { + success: false, + error: `Workspace already exists at ${workspacePath}`, + }; + } + + // Check if branch exists locally + const localBranches = await listLocalBranches(projectPath); + const branchExists = localBranches.includes(branchName); + + // Create worktree (git worktree is typically fast) + if (branchExists) { + // Branch exists, just add worktree pointing to it + using proc = execAsync( + `git -C "${projectPath}" worktree add "${workspacePath}" "${branchName}"` + ); + await proc.result; + } else { + // Branch doesn't exist, create it from trunk + using proc = execAsync( + `git -C "${projectPath}" worktree add -b "${branchName}" "${workspacePath}" "${trunkBranch}"` + ); + await proc.result; + } + + initLogger.logStep("Worktree created successfully"); + + return { success: true, workspacePath }; + } catch (error) { + return { + success: false, + error: getErrorMessage(error), + }; + } + } + + async initWorkspace(params: WorkspaceInitParams): Promise { + const { projectPath, workspacePath, 
initLogger } = params; + + try { + // Run .cmux/init hook if it exists + // Note: runInitHook calls logComplete() internally if hook exists + const hookExists = await checkInitHookExists(projectPath); + if (hookExists) { + await this.runInitHook(projectPath, workspacePath, initLogger); + } else { + // No hook - signal completion immediately + initLogger.logComplete(0); + } + return { success: true }; + } catch (error) { + const errorMsg = getErrorMessage(error); + initLogger.logStderr(`Initialization failed: ${errorMsg}`); + initLogger.logComplete(-1); + return { + success: false, + error: errorMsg, + }; + } + } + + /** + * Run .cmux/init hook if it exists and is executable + */ + private async runInitHook( + projectPath: string, + workspacePath: string, + initLogger: InitLogger + ): Promise { + // Check if hook exists and is executable + const hookExists = await checkInitHookExists(projectPath); + if (!hookExists) { + return; + } + + const hookPath = getInitHookPath(projectPath); + initLogger.logStep(`Running init hook: ${hookPath}`); + + // Create line-buffered loggers + const loggers = createLineBufferedLoggers(initLogger); + + return new Promise((resolve) => { + const bashPath = findBashPath(); + const proc = spawn(bashPath, ["-c", `"${hookPath}"`], { + cwd: workspacePath, + stdio: ["ignore", "pipe", "pipe"], + }); + + proc.stdout.on("data", (data: Buffer) => { + loggers.stdout.append(data.toString()); + }); + + proc.stderr.on("data", (data: Buffer) => { + loggers.stderr.append(data.toString()); + }); + + proc.on("close", (code) => { + // Flush any remaining buffered output + loggers.stdout.flush(); + loggers.stderr.flush(); + + initLogger.logComplete(code ?? 
0); + resolve(); + }); + + proc.on("error", (err) => { + initLogger.logStderr(`Error running init hook: ${err.message}`); + initLogger.logComplete(-1); + resolve(); + }); + }); + } + + async renameWorkspace( + projectPath: string, + oldName: string, + newName: string + ): Promise< + { success: true; oldPath: string; newPath: string } | { success: false; error: string } + > { + // Compute workspace paths using canonical method + const oldPath = this.getWorkspacePath(projectPath, oldName); + const newPath = this.getWorkspacePath(projectPath, newName); + + try { + // Use git worktree move to rename the worktree directory + // This updates git's internal worktree metadata correctly + using proc = execAsync(`git -C "${projectPath}" worktree move "${oldPath}" "${newPath}"`); + await proc.result; + + return { success: true, oldPath, newPath }; + } catch (error) { + return { success: false, error: `Failed to move worktree: ${getErrorMessage(error)}` }; + } + } + + async deleteWorkspace( + projectPath: string, + workspaceName: string, + force: boolean + ): Promise<{ success: true; deletedPath: string } | { success: false; error: string }> { + // Compute workspace path using the canonical method + const deletedPath = this.getWorkspacePath(projectPath, workspaceName); + + try { + // Use git worktree remove to delete the worktree + // This updates git's internal worktree metadata correctly + // Only use --force if explicitly requested by the caller + const forceFlag = force ? 
" --force" : ""; + using proc = execAsync( + `git -C "${projectPath}" worktree remove${forceFlag} "${deletedPath}"` + ); + await proc.result; + + return { success: true, deletedPath }; + } catch (error) { + const message = getErrorMessage(error); + + // If force is enabled and git worktree remove failed, fall back to rm -rf + // This handles edge cases like submodules where git refuses to delete + if (force) { + try { + // Prune git's worktree records first (best effort) + try { + using pruneProc = execAsync(`git -C "${projectPath}" worktree prune`); + await pruneProc.result; + } catch { + // Ignore prune errors - we'll still try rm -rf + } + + // Force delete the directory + using rmProc = execAsync(`rm -rf "${deletedPath}"`); + await rmProc.result; + + return { success: true, deletedPath }; + } catch (rmError) { + return { + success: false, + error: `Failed to remove worktree via git and rm: ${getErrorMessage(rmError)}`, + }; + } + } + + // force=false - return the git error without attempting rm -rf + return { success: false, error: `Failed to remove worktree: ${message}` }; + } + } +} diff --git a/src/runtime/Runtime.ts b/src/runtime/Runtime.ts new file mode 100644 index 0000000000..0bfd1af257 --- /dev/null +++ b/src/runtime/Runtime.ts @@ -0,0 +1,286 @@ +/** + * Runtime abstraction for executing tools in different environments. + * + * DESIGN PRINCIPLE: Keep this interface minimal and low-level. + * - Prefer streaming primitives over buffered APIs + * - Implement shared helpers (utils/runtime/) that work across all runtimes + * - Avoid duplicating helper logic in each runtime implementation + * + * This interface allows tools to run locally, in Docker containers, over SSH, etc. 
+ */ + +/** + * PATH TERMINOLOGY & HIERARCHY + * + * srcBaseDir (base directory for all workspaces): + * - Where cmux stores ALL workspace directories + * - Local: ~/.cmux/src + * - SSH: /home/user/workspace (or custom remote path) + * + * Workspace Path Computation: + * {srcBaseDir}/{projectName}/{workspaceName} + * + * - projectName: basename(projectPath) + * Example: "/Users/me/git/my-project" → "my-project" + * + * - workspaceName: branch name or custom name + * Example: "feature-123" or "main" + * + * Full Example (Local): + * srcBaseDir: ~/.cmux/src + * projectPath: /Users/me/git/my-project (local git repo) + * projectName: my-project (extracted) + * workspaceName: feature-123 + * → Workspace: ~/.cmux/src/my-project/feature-123 + * + * Full Example (SSH): + * srcBaseDir: /home/user/workspace + * projectPath: /Users/me/git/my-project (local git repo) + * projectName: my-project (extracted) + * workspaceName: feature-123 + * → Workspace: /home/user/workspace/my-project/feature-123 + */ + +/** + * Options for executing a command + */ +export interface ExecOptions { + /** Working directory for command execution */ + cwd: string; + /** Environment variables to inject */ + env?: Record; + /** + * Timeout in seconds (REQUIRED) + * + * Prevents zombie processes by ensuring all spawned processes are eventually killed. + * Even long-running commands should have a reasonable upper bound (e.g., 3600s for 1 hour). 
+ */ + timeout: number; + /** Process niceness level (-20 to 19, lower = higher priority) */ + niceness?: number; + /** Abort signal for cancellation */ + abortSignal?: AbortSignal; +} + +/** + * Streaming result from executing a command + */ +export interface ExecStream { + /** Standard output stream */ + stdout: ReadableStream; + /** Standard error stream */ + stderr: ReadableStream; + /** Standard input stream */ + stdin: WritableStream; + /** Promise that resolves with exit code when process completes */ + exitCode: Promise; + /** Promise that resolves with wall clock duration in milliseconds */ + duration: Promise; +} + +/** + * File statistics + */ +export interface FileStat { + /** File size in bytes */ + size: number; + /** Last modified time */ + modifiedTime: Date; + /** True if path is a directory (false implies regular file for our purposes) */ + isDirectory: boolean; +} + +/** + * Logger for streaming workspace initialization events to frontend. + * Used to report progress during workspace creation and init hook execution. 
+ */ +export interface InitLogger { + /** Log a creation step (e.g., "Creating worktree", "Syncing files") */ + logStep(message: string): void; + /** Log stdout line from init hook */ + logStdout(line: string): void; + /** Log stderr line from init hook */ + logStderr(line: string): void; + /** Report init hook completion */ + logComplete(exitCode: number): void; +} + +/** + * Parameters for workspace creation + */ +export interface WorkspaceCreationParams { + /** Absolute path to project directory on local machine */ + projectPath: string; + /** Branch name to checkout in workspace */ + branchName: string; + /** Trunk branch to base new branches on */ + trunkBranch: string; + /** Directory name to use for workspace (typically branch name) */ + directoryName: string; + /** Logger for streaming creation progress and init hook output */ + initLogger: InitLogger; +} + +/** + * Result from workspace creation + */ +export interface WorkspaceCreationResult { + success: boolean; + /** Absolute path to workspace (local path for LocalRuntime, remote path for SSHRuntime) */ + workspacePath?: string; + error?: string; +} + +/** + * Parameters for workspace initialization + */ +export interface WorkspaceInitParams { + /** Absolute path to project directory on local machine */ + projectPath: string; + /** Branch name to checkout in workspace */ + branchName: string; + /** Trunk branch to base new branches on */ + trunkBranch: string; + /** Absolute path to workspace (from createWorkspace result) */ + workspacePath: string; + /** Logger for streaming initialization progress and output */ + initLogger: InitLogger; +} + +/** + * Result from workspace initialization + */ +export interface WorkspaceInitResult { + success: boolean; + error?: string; +} + +/** + * Runtime interface - minimal, low-level abstraction for tool execution environments. + * + * All methods return streaming primitives for memory efficiency. 
+ * Use helpers in utils/runtime/ for convenience wrappers (e.g., readFileString, execBuffered). + */ +export interface Runtime { + /** + * Execute a bash command with streaming I/O + * @param command The bash script to execute + * @param options Execution options (cwd, env, timeout, etc.) + * @returns Promise that resolves to streaming handles for stdin/stdout/stderr and completion promises + * @throws RuntimeError if execution fails in an unrecoverable way + */ + exec(command: string, options: ExecOptions): Promise; + + /** + * Read file contents as a stream + * @param path Absolute or relative path to file + * @returns Readable stream of file contents + * @throws RuntimeError if file cannot be read + */ + readFile(path: string): ReadableStream; + + /** + * Write file contents atomically from a stream + * @param path Absolute or relative path to file + * @returns Writable stream for file contents + * @throws RuntimeError if file cannot be written + */ + writeFile(path: string): WritableStream; + + /** + * Get file statistics + * @param path Absolute or relative path to file/directory + * @returns File statistics + * @throws RuntimeError if path does not exist or cannot be accessed + */ + stat(path: string): Promise; + + /** + * Compute absolute workspace path from project and workspace name. + * This is the SINGLE source of truth for workspace path computation. + * + * - LocalRuntime: {workdir}/{project-name}/{workspace-name} + * - SSHRuntime: {workdir}/{project-name}/{workspace-name} + * + * All Runtime methods (create, delete, rename) MUST use this method internally + * to ensure consistent path computation. 
+ * + * @param projectPath Project root path (local path, used to extract project name) + * @param workspaceName Workspace name (typically branch name) + * @returns Absolute path to workspace directory + */ + getWorkspacePath(projectPath: string, workspaceName: string): string; + + /** + * Create a workspace for this runtime (fast, returns immediately) + * - LocalRuntime: Creates git worktree + * - SSHRuntime: Creates remote directory only + * Does NOT run init hook or sync files. + * @param params Workspace creation parameters + * @returns Result with workspace path or error + */ + createWorkspace(params: WorkspaceCreationParams): Promise; + + /** + * Initialize workspace asynchronously (may be slow, streams progress) + * - LocalRuntime: Runs init hook if present + * - SSHRuntime: Syncs files, checks out branch, runs init hook + * Streams progress via initLogger. + * @param params Workspace initialization parameters + * @returns Result indicating success or error + */ + initWorkspace(params: WorkspaceInitParams): Promise; + + /** + * Rename workspace directory + * - LocalRuntime: Uses git worktree move (worktrees managed by git) + * - SSHRuntime: Uses mv (plain directories on remote, not worktrees) + * Runtime computes workspace paths internally from workdir + projectPath + workspace names. 
+ * @param projectPath Project root path (local path, used for git commands in LocalRuntime and to extract project name) + * @param oldName Current workspace name + * @param newName New workspace name + * @returns Promise resolving to Result with old/new paths on success, or error message + */ + renameWorkspace( + projectPath: string, + oldName: string, + newName: string + ): Promise< + { success: true; oldPath: string; newPath: string } | { success: false; error: string } + >; + + /** + * Delete workspace directory + * - LocalRuntime: Uses git worktree remove (with --force only if force param is true) + * - SSHRuntime: Checks for uncommitted changes unless force is true, then uses rm -rf + * Runtime computes workspace path internally from workdir + projectPath + workspaceName. + * + * **CRITICAL: Implementations must NEVER auto-apply --force or skip dirty checks without explicit force=true.** + * If workspace has uncommitted changes and force=false, implementations MUST return error. + * The force flag is the user's explicit intent - implementations must not override it. + * + * @param projectPath Project root path (local path, used for git commands in LocalRuntime and to extract project name) + * @param workspaceName Workspace name to delete + * @param force If true, force deletion even with uncommitted changes or special conditions (submodules, etc.) 
+ * @returns Promise resolving to Result with deleted path on success, or error message + */ + deleteWorkspace( + projectPath: string, + workspaceName: string, + force: boolean + ): Promise<{ success: true; deletedPath: string } | { success: false; error: string }>; +} + +/** + * Error thrown by runtime implementations + */ +export class RuntimeError extends Error { + constructor( + message: string, + public readonly type: "exec" | "file_io" | "network" | "unknown", + public readonly cause?: Error + ) { + super(message); + this.name = "RuntimeError"; + } +} diff --git a/src/runtime/SSHRuntime.ts b/src/runtime/SSHRuntime.ts new file mode 100644 index 0000000000..7c19f3d27d --- /dev/null +++ b/src/runtime/SSHRuntime.ts @@ -0,0 +1,845 @@ +import { spawn } from "child_process"; +import { Readable, Writable } from "stream"; +import * as path from "path"; +import * as os from "os"; +import * as crypto from "crypto"; +import { Shescape } from "shescape"; +import type { + Runtime, + ExecOptions, + ExecStream, + FileStat, + WorkspaceCreationParams, + WorkspaceCreationResult, + WorkspaceInitParams, + WorkspaceInitResult, + InitLogger, +} from "./Runtime"; +import { RuntimeError as RuntimeErrorClass } from "./Runtime"; +import { EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT } from "../constants/exitCodes"; +import { log } from "../services/log"; +import { checkInitHookExists, createLineBufferedLoggers } from "./initHook"; +import { streamProcessToLogger } from "./streamProcess"; +import { expandTildeForSSH, cdCommandForSSH } from "./tildeExpansion"; +import { findBashPath } from "./executablePaths"; +import { getProjectName } from "../utils/runtime/helpers"; +import { getErrorMessage } from "../utils/errors"; + +/** + * Shescape instance for bash shell escaping. + * Reused across all SSH runtime operations for performance. 
+ */ +const shescape = new Shescape({ shell: "/bin/bash" }); + +/** + * SSH Runtime Configuration + */ +export interface SSHRuntimeConfig { + /** SSH host (can be hostname, user@host, or SSH config alias) */ + host: string; + /** Working directory on remote host */ + srcBaseDir: string; + /** Optional: Path to SSH private key (if not using ~/.ssh/config or ssh-agent) */ + identityFile?: string; + /** Optional: SSH port (default: 22) */ + port?: number; +} + +/** + * SSH runtime implementation that executes commands and file operations + * over SSH using the ssh command-line tool. + * + * Features: + * - Uses system ssh command (respects ~/.ssh/config) + * - Supports SSH config aliases, ProxyJump, ControlMaster, etc. + * - No password prompts (assumes key-based auth or ssh-agent) + * - Atomic file writes via temp + rename + */ +export class SSHRuntime implements Runtime { + private readonly config: SSHRuntimeConfig; + private readonly controlPath: string; + + constructor(config: SSHRuntimeConfig) { + this.config = config; + // Generate unique control path for SSH connection multiplexing + // This allows multiple SSH sessions to reuse a single TCP connection + const randomId = crypto.randomBytes(8).toString("hex"); + this.controlPath = path.join(os.tmpdir(), `cmux-ssh-${randomId}`); + } + + /** + * Execute command over SSH with streaming I/O + */ + // eslint-disable-next-line @typescript-eslint/require-await + async exec(command: string, options: ExecOptions): Promise { + const startTime = performance.now(); + + // Build command parts + const parts: string[] = []; + + // Add cd command if cwd is specified + parts.push(cdCommandForSSH(options.cwd)); + + // Add environment variable exports + if (options.env) { + for (const [key, value] of Object.entries(options.env)) { + parts.push(`export ${key}=${shescape.quote(value)}`); + } + } + + // Add the actual command + parts.push(command); + + // Join all parts with && to ensure each step succeeds before continuing + const 
fullCommand = parts.join(" && "); + + // Wrap in bash -c with shescape for safe shell execution + const remoteCommand = `bash -c ${shescape.quote(fullCommand)}`; + + // Build SSH args + const sshArgs: string[] = ["-T"]; + + // Add port if specified + if (this.config.port) { + sshArgs.push("-p", this.config.port.toString()); + } + + // Add identity file if specified + if (this.config.identityFile) { + sshArgs.push("-i", this.config.identityFile); + // Disable strict host key checking for test environments + sshArgs.push("-o", "StrictHostKeyChecking=no"); + sshArgs.push("-o", "UserKnownHostsFile=/dev/null"); + sshArgs.push("-o", "LogLevel=ERROR"); // Suppress SSH warnings + } + + // Enable SSH connection multiplexing for better performance and to avoid + // exhausting connection limits when running many concurrent operations + // ControlMaster=auto: Create master connection if none exists, otherwise reuse + // ControlPath: Unix socket path for multiplexing + // ControlPersist=60: Keep master connection alive for 60s after last session + sshArgs.push("-o", "ControlMaster=auto"); + sshArgs.push("-o", `ControlPath=${this.controlPath}`); + sshArgs.push("-o", "ControlPersist=60"); + + sshArgs.push(this.config.host, remoteCommand); + + // Debug: log the actual SSH command being executed + log.debug(`SSH command: ssh ${sshArgs.join(" ")}`); + log.debug(`Remote command: ${remoteCommand}`); + + // Spawn ssh command + const sshProcess = spawn("ssh", sshArgs, { + stdio: ["pipe", "pipe", "pipe"], + }); + + // Convert Node.js streams to Web Streams + const stdout = Readable.toWeb(sshProcess.stdout) as unknown as ReadableStream; + const stderr = Readable.toWeb(sshProcess.stderr) as unknown as ReadableStream; + const stdin = Writable.toWeb(sshProcess.stdin) as unknown as WritableStream; + + // Track if we killed the process due to timeout + let timedOut = false; + + // Create promises for exit code and duration + // Uses special exit codes (EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT) for 
expected error conditions + const exitCode = new Promise((resolve, reject) => { + sshProcess.on("close", (code, signal) => { + // Check abort first (highest priority) + if (options.abortSignal?.aborted) { + resolve(EXIT_CODE_ABORTED); + return; + } + // Check if we killed the process due to timeout + // Don't check signal - if we set timedOut, we timed out regardless of how process died + if (timedOut) { + resolve(EXIT_CODE_TIMEOUT); + return; + } + resolve(code ?? (signal ? -1 : 0)); + }); + + sshProcess.on("error", (err) => { + reject(new RuntimeErrorClass(`Failed to execute SSH command: ${err.message}`, "exec", err)); + }); + }); + + const duration = exitCode.then(() => performance.now() - startTime); + + // Handle abort signal + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", () => sshProcess.kill()); + } + + // Handle timeout + if (options.timeout !== undefined) { + setTimeout(() => { + timedOut = true; + sshProcess.kill(); + }, options.timeout * 1000); + } + + return { stdout, stderr, stdin, exitCode, duration }; + } + + /** + * Read file contents over SSH as a stream + */ + readFile(path: string): ReadableStream { + // Return stdout, but wrap to handle errors from exec() and exit code + return new ReadableStream({ + start: async (controller: ReadableStreamDefaultController) => { + try { + const stream = await this.exec(`cat ${shescape.quote(path)}`, { + cwd: this.config.srcBaseDir, + timeout: 300, // 5 minutes - reasonable for large files + }); + + const reader = stream.stdout.getReader(); + const exitCode = stream.exitCode; + + // Read all chunks + while (true) { + const { done, value } = await reader.read(); + if (done) break; + controller.enqueue(value); + } + + // Check exit code after reading completes + const code = await exitCode; + if (code !== 0) { + const stderr = await streamToString(stream.stderr); + throw new RuntimeErrorClass(`Failed to read file ${path}: ${stderr}`, "file_io"); + } + + controller.close(); + } catch 
(err) { + if (err instanceof RuntimeErrorClass) { + controller.error(err); + } else { + controller.error( + new RuntimeErrorClass( + `Failed to read file ${path}: ${err instanceof Error ? err.message : String(err)}`, + "file_io", + err instanceof Error ? err : undefined + ) + ); + } + } + }, + }); + } + + /** + * Write file contents over SSH atomically from a stream + */ + writeFile(path: string): WritableStream { + const tempPath = `${path}.tmp.${Date.now()}`; + // Create parent directory if needed, then write file atomically + // Use shescape.quote for safe path escaping + const writeCommand = `mkdir -p $(dirname ${shescape.quote(path)}) && cat > ${shescape.quote(tempPath)} && chmod 600 ${shescape.quote(tempPath)} && mv ${shescape.quote(tempPath)} ${shescape.quote(path)}`; + + // Need to get the exec stream in async callbacks + let execPromise: Promise | null = null; + + const getExecStream = () => { + execPromise ??= this.exec(writeCommand, { + cwd: this.config.srcBaseDir, + timeout: 300, // 5 minutes - reasonable for large files + }); + return execPromise; + }; + + // Wrap stdin to handle errors from exit code + return new WritableStream({ + write: async (chunk: Uint8Array) => { + const stream = await getExecStream(); + const writer = stream.stdin.getWriter(); + try { + await writer.write(chunk); + } finally { + writer.releaseLock(); + } + }, + close: async () => { + const stream = await getExecStream(); + // Close stdin and wait for command to complete + await stream.stdin.close(); + const exitCode = await stream.exitCode; + + if (exitCode !== 0) { + const stderr = await streamToString(stream.stderr); + throw new RuntimeErrorClass(`Failed to write file ${path}: ${stderr}`, "file_io"); + } + }, + abort: async (reason?: unknown) => { + const stream = await getExecStream(); + await stream.stdin.abort(); + throw new RuntimeErrorClass(`Failed to write file ${path}: ${String(reason)}`, "file_io"); + }, + }); + } + + /** + * Get file statistics over SSH + */ + async 
stat(path: string): Promise { + // Use stat with format string to get: size, mtime, type + // %s = size, %Y = mtime (seconds since epoch), %F = file type + const stream = await this.exec(`stat -c '%s %Y %F' ${shescape.quote(path)}`, { + cwd: this.config.srcBaseDir, + timeout: 10, // 10 seconds - stat should be fast + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + streamToString(stream.stdout), + streamToString(stream.stderr), + stream.exitCode, + ]); + + if (exitCode !== 0) { + throw new RuntimeErrorClass(`Failed to stat ${path}: ${stderr}`, "file_io"); + } + + const parts = stdout.trim().split(" "); + if (parts.length < 3) { + throw new RuntimeErrorClass(`Failed to parse stat output for ${path}: ${stdout}`, "file_io"); + } + + const size = parseInt(parts[0], 10); + const mtime = parseInt(parts[1], 10); + const fileType = parts.slice(2).join(" "); + + return { + size, + modifiedTime: new Date(mtime * 1000), + isDirectory: fileType === "directory", + }; + } + + /** + * Build common SSH arguments based on runtime config + * @param includeHost - Whether to include the host in the args (for direct ssh commands) + */ + private buildSSHArgs(includeHost = false): string[] { + const args: string[] = []; + + // Add port if specified + if (this.config.port) { + args.push("-p", this.config.port.toString()); + } + + // Add identity file if specified + if (this.config.identityFile) { + args.push("-i", this.config.identityFile); + // Disable strict host key checking for test environments + args.push("-o", "StrictHostKeyChecking=no"); + args.push("-o", "UserKnownHostsFile=/dev/null"); + args.push("-o", "LogLevel=ERROR"); + } + + if (includeHost) { + args.push(this.config.host); + } + + return args; + } + + /** + * Sync project to remote using git bundle + * + * Uses `git bundle` to create a packfile and clones it on the remote. 
+ * + * Benefits over git archive: + * - Creates a real git repository on remote (can run git commands) + * - Better parity with git worktrees (full .git directory with metadata) + * - Enables remote git operations (commit, branch, status, diff, etc.) + * - Only tracked files in checkout (no node_modules, build artifacts) + * - Includes full history for flexibility + * + * Benefits over rsync/scp: + * - Much faster (only tracked files) + * - No external dependencies (git is always available) + * - Simpler implementation + */ + private async syncProjectToRemote( + projectPath: string, + workspacePath: string, + initLogger: InitLogger + ): Promise { + // Use timestamp-based bundle path to avoid conflicts (simpler than $$) + const timestamp = Date.now(); + const bundleTempPath = `~/.cmux-bundle-${timestamp}.bundle`; + + try { + // Step 1: Create bundle locally and pipe to remote file via SSH + initLogger.logStep(`Creating git bundle...`); + await new Promise((resolve, reject) => { + const sshArgs = this.buildSSHArgs(true); + const command = `cd ${JSON.stringify(projectPath)} && git bundle create - --all | ssh ${sshArgs.join(" ")} "cat > ${bundleTempPath}"`; + + log.debug(`Creating bundle: ${command}`); + const bashPath = findBashPath(); + const proc = spawn(bashPath, ["-c", command]); + + streamProcessToLogger(proc, initLogger, { + logStdout: false, + logStderr: true, + }); + + let stderr = ""; + proc.stderr.on("data", (data: Buffer) => { + stderr += data.toString(); + }); + + proc.on("close", (code) => { + if (code === 0) { + resolve(); + } else { + reject(new Error(`Failed to create bundle: ${stderr}`)); + } + }); + + proc.on("error", (err) => { + reject(err); + }); + }); + + // Step 2: Clone from bundle on remote using this.exec + initLogger.logStep(`Cloning repository on remote...`); + + // Expand tilde in destination path for git clone + // git doesn't expand tilde when it's quoted, so we need to expand it ourselves + const cloneDestPath = 
expandTildeForSSH(workspacePath); + + const cloneStream = await this.exec(`git clone --quiet ${bundleTempPath} ${cloneDestPath}`, { + cwd: "~", + timeout: 300, // 5 minutes for clone + }); + + const [cloneStdout, cloneStderr, cloneExitCode] = await Promise.all([ + streamToString(cloneStream.stdout), + streamToString(cloneStream.stderr), + cloneStream.exitCode, + ]); + + if (cloneExitCode !== 0) { + throw new Error(`Failed to clone repository: ${cloneStderr || cloneStdout}`); + } + + // Step 3: Remove bundle file + initLogger.logStep(`Cleaning up bundle file...`); + const rmStream = await this.exec(`rm ${bundleTempPath}`, { + cwd: "~", + timeout: 10, + }); + + const rmExitCode = await rmStream.exitCode; + if (rmExitCode !== 0) { + log.info(`Failed to remove bundle file ${bundleTempPath}, but continuing`); + } + + initLogger.logStep(`Repository cloned successfully`); + } catch (error) { + // Try to clean up bundle file on error + try { + const rmStream = await this.exec(`rm -f ${bundleTempPath}`, { + cwd: "~", + timeout: 10, + }); + await rmStream.exitCode; + } catch { + // Ignore cleanup errors + } + + throw error; + } + } + + /** + * Run .cmux/init hook on remote machine if it exists + */ + private async runInitHook( + projectPath: string, + workspacePath: string, + initLogger: InitLogger + ): Promise { + // Check if hook exists locally (we synced the project, so local check is sufficient) + const hookExists = await checkInitHookExists(projectPath); + if (!hookExists) { + return; + } + + // Construct hook path - expand tilde if present + const remoteHookPath = `${workspacePath}/.cmux/init`; + initLogger.logStep(`Running init hook: ${remoteHookPath}`); + + // Expand tilde in hook path for execution + // Tilde won't be expanded when the path is quoted, so we need to expand it ourselves + const hookCommand = expandTildeForSSH(remoteHookPath); + + // Run hook remotely and stream output + // Generous 1-hour timeout - user init hooks can be long-running + const hookStream = await
this.exec(hookCommand, { + cwd: workspacePath, // Run in the workspace directory + timeout: 3600, // 1 hour - generous timeout for init hooks + }); + + // Create line-buffered loggers + const loggers = createLineBufferedLoggers(initLogger); + + // Stream stdout/stderr through line-buffered loggers + const stdoutReader = hookStream.stdout.getReader(); + const stderrReader = hookStream.stderr.getReader(); + const decoder = new TextDecoder(); + + // Read stdout in parallel + const readStdout = async () => { + try { + while (true) { + const { done, value } = await stdoutReader.read(); + if (done) break; + loggers.stdout.append(decoder.decode(value, { stream: true })); + } + loggers.stdout.flush(); + } finally { + stdoutReader.releaseLock(); + } + }; + + // Read stderr in parallel + const readStderr = async () => { + try { + while (true) { + const { done, value } = await stderrReader.read(); + if (done) break; + loggers.stderr.append(decoder.decode(value, { stream: true })); + } + loggers.stderr.flush(); + } finally { + stderrReader.releaseLock(); + } + }; + + // Wait for completion + const [exitCode] = await Promise.all([hookStream.exitCode, readStdout(), readStderr()]); + + initLogger.logComplete(exitCode); + } + + getWorkspacePath(projectPath: string, workspaceName: string): string { + const projectName = getProjectName(projectPath); + return path.posix.join(this.config.srcBaseDir, projectName, workspaceName); + } + + async createWorkspace(params: WorkspaceCreationParams): Promise { + try { + const { projectPath, branchName, initLogger } = params; + // Compute workspace path using canonical method + const workspacePath = this.getWorkspacePath(projectPath, branchName); + + // Prepare parent directory for git clone (fast - returns immediately) + // Note: git clone will create the workspace directory itself during initWorkspace, + // but the parent directory must exist first + initLogger.logStep("Preparing remote workspace..."); + try { + // Extract parent directory 
from workspace path + // Example: ~/workspace/project/branch -> ~/workspace/project + const lastSlash = workspacePath.lastIndexOf("/"); + const parentDir = lastSlash > 0 ? workspacePath.substring(0, lastSlash) : "~"; + + // Expand tilde for mkdir command + const expandedParentDir = expandTildeForSSH(parentDir); + const parentDirCommand = `mkdir -p ${expandedParentDir}`; + + const mkdirStream = await this.exec(parentDirCommand, { + cwd: "/tmp", + timeout: 10, + }); + const mkdirExitCode = await mkdirStream.exitCode; + if (mkdirExitCode !== 0) { + const stderr = await streamToString(mkdirStream.stderr); + return { + success: false, + error: `Failed to prepare remote workspace: ${stderr}`, + }; + } + } catch (error) { + return { + success: false, + error: `Failed to prepare remote workspace: ${getErrorMessage(error)}`, + }; + } + + initLogger.logStep("Remote workspace prepared"); + + return { + success: true, + workspacePath, + }; + } catch (error) { + return { + success: false, + error: getErrorMessage(error), + }; + } + } + + async initWorkspace(params: WorkspaceInitParams): Promise { + const { + projectPath, + branchName, + trunkBranch: _trunkBranch, + workspacePath, + initLogger, + } = params; + + try { + // 1. Sync project to remote (git bundle transfer + clone) + initLogger.logStep("Syncing project files to remote..."); + try { + await this.syncProjectToRemote(projectPath, workspacePath, initLogger); + } catch (error) { + const errorMsg = getErrorMessage(error); + initLogger.logStderr(`Failed to sync project: ${errorMsg}`); + initLogger.logComplete(-1); + return { + success: false, + error: `Failed to sync project: ${errorMsg}`, + }; + } + initLogger.logStep("Files synced successfully"); + + // 2. 
Checkout branch remotely + // Note: After git clone, HEAD is already checked out to the default branch from the bundle + // We create new branches from HEAD instead of the trunkBranch name to avoid issues + // where the local repo's trunk name doesn't match the cloned repo's default branch + initLogger.logStep(`Checking out branch: ${branchName}`); + const checkoutCmd = `(git checkout ${JSON.stringify(branchName)} 2>/dev/null || git checkout -b ${JSON.stringify(branchName)} HEAD)`; + + const checkoutStream = await this.exec(checkoutCmd, { + cwd: workspacePath, // Use the full workspace path for git operations + timeout: 300, // 5 minutes for git checkout (can be slow on large repos) + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + streamToString(checkoutStream.stdout), + streamToString(checkoutStream.stderr), + checkoutStream.exitCode, + ]); + + if (exitCode !== 0) { + const errorMsg = `Failed to checkout branch: ${stderr || stdout}`; + initLogger.logStderr(errorMsg); + initLogger.logComplete(-1); + return { + success: false, + error: errorMsg, + }; + } + initLogger.logStep("Branch checked out successfully"); + + // 3. 
Run .cmux/init hook if it exists + // Note: runInitHook calls logComplete() internally if hook exists + const hookExists = await checkInitHookExists(projectPath); + if (hookExists) { + await this.runInitHook(projectPath, workspacePath, initLogger); + } else { + // No hook - signal completion immediately + initLogger.logComplete(0); + } + + return { success: true }; + } catch (error) { + const errorMsg = getErrorMessage(error); + initLogger.logStderr(`Initialization failed: ${errorMsg}`); + initLogger.logComplete(-1); + return { + success: false, + error: errorMsg, + }; + } + } + + async renameWorkspace( + projectPath: string, + oldName: string, + newName: string + ): Promise< + { success: true; oldPath: string; newPath: string } | { success: false; error: string } + > { + // Compute workspace paths using canonical method + const oldPath = this.getWorkspacePath(projectPath, oldName); + const newPath = this.getWorkspacePath(projectPath, newName); + + try { + // SSH runtimes use plain directories, not git worktrees + // Expand tilde and quote paths (expandTildeForSSH handles both expansion and quoting) + const expandedOldPath = expandTildeForSSH(oldPath); + const expandedNewPath = expandTildeForSSH(newPath); + + // Just use mv to rename the directory on the remote host + const moveCommand = `mv ${expandedOldPath} ${expandedNewPath}`; + + // Execute via the runtime's exec method (handles SSH connection multiplexing, etc.) 
+ const stream = await this.exec(moveCommand, { + cwd: this.config.srcBaseDir, + timeout: 30, + }); + + await stream.stdin.close(); + const exitCode = await stream.exitCode; + + if (exitCode !== 0) { + // Read stderr for error message + const stderrReader = stream.stderr.getReader(); + const decoder = new TextDecoder(); + let stderr = ""; + try { + while (true) { + const { done, value } = await stderrReader.read(); + if (done) break; + stderr += decoder.decode(value, { stream: true }); + } + } finally { + stderrReader.releaseLock(); + } + return { + success: false, + error: `Failed to rename directory: ${stderr || "Unknown error"}`, + }; + } + + return { success: true, oldPath, newPath }; + } catch (error) { + return { success: false, error: `Failed to rename directory: ${getErrorMessage(error)}` }; + } + } + + async deleteWorkspace( + projectPath: string, + workspaceName: string, + force: boolean + ): Promise<{ success: true; deletedPath: string } | { success: false; error: string }> { + // Compute workspace path using canonical method + const deletedPath = this.getWorkspacePath(projectPath, workspaceName); + + try { + // Check if workspace exists first + const checkExistStream = await this.exec(`test -d ${shescape.quote(deletedPath)}`, { + cwd: this.config.srcBaseDir, + timeout: 10, + }); + + await checkExistStream.stdin.close(); + const existsExitCode = await checkExistStream.exitCode; + + // If directory doesn't exist, deletion is a no-op (success) + if (existsExitCode !== 0) { + return { success: true, deletedPath }; + } + + // Check if workspace has uncommitted changes (unless force is true) + if (!force) { + // Check for uncommitted changes using git diff + const checkStream = await this.exec( + `cd ${shescape.quote(deletedPath)} && git diff --quiet --exit-code && git diff --quiet --cached --exit-code`, + { + cwd: this.config.srcBaseDir, + timeout: 10, + } + ); + + await checkStream.stdin.close(); + const checkExitCode = await checkStream.exitCode; + + if 
(checkExitCode !== 0) { + // Workspace has uncommitted changes + return { + success: false, + error: `Workspace contains uncommitted changes. Use force flag to delete anyway.`, + }; + } + } + + // SSH runtimes use plain directories, not git worktrees + // Use rm -rf to remove the directory on the remote host + const removeCommand = `rm -rf ${shescape.quote(deletedPath)}`; + + // Execute via the runtime's exec method (handles SSH connection multiplexing, etc.) + const stream = await this.exec(removeCommand, { + cwd: this.config.srcBaseDir, + timeout: 30, + }); + + await stream.stdin.close(); + const exitCode = await stream.exitCode; + + if (exitCode !== 0) { + // Read stderr for error message + const stderrReader = stream.stderr.getReader(); + const decoder = new TextDecoder(); + let stderr = ""; + try { + while (true) { + const { done, value } = await stderrReader.read(); + if (done) break; + stderr += decoder.decode(value, { stream: true }); + } + } finally { + stderrReader.releaseLock(); + } + return { + success: false, + error: `Failed to delete directory: ${stderr || "Unknown error"}`, + }; + } + + return { success: true, deletedPath }; + } catch (error) { + return { success: false, error: `Failed to delete directory: ${getErrorMessage(error)}` }; + } + } + + /** + * Cleanup SSH control socket on disposal + * Note: ControlPersist will automatically close the master connection after timeout, + * but we try to clean up immediately for good hygiene + */ + dispose(): void { + try { + // Send exit command to master connection (if it exists) + // This is a best-effort cleanup - the socket will auto-cleanup anyway + const exitArgs = ["-O", "exit", "-o", `ControlPath=${this.controlPath}`, this.config.host]; + + const exitProc = spawn("ssh", exitArgs, { stdio: "ignore" }); + + // Don't wait for it - fire and forget + exitProc.unref(); + } catch (error) { + // Ignore errors - control socket will timeout naturally + log.debug(`SSH control socket cleanup failed 
(non-fatal): ${getErrorMessage(error)}`); + } + } +} + +/** + * Helper to convert a ReadableStream to a string + */ +async function streamToString(stream: ReadableStream): Promise { + const reader = stream.getReader(); + const decoder = new TextDecoder("utf-8"); + let result = ""; + + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + result += decoder.decode(value, { stream: true }); + } + result += decoder.decode(); + return result; + } finally { + reader.releaseLock(); + } +} diff --git a/src/runtime/executablePaths.ts b/src/runtime/executablePaths.ts new file mode 100644 index 0000000000..9744eb9564 --- /dev/null +++ b/src/runtime/executablePaths.ts @@ -0,0 +1,57 @@ +/** + * Utilities for finding executable paths + * + * In CI and some containerized environments, PATH may not be set correctly + * for spawned child processes. This module provides reliable ways to find + * common executables by checking standard locations. + */ + +import { existsSync } from "fs"; + +/** + * Find the bash executable path. + * Checks common locations and falls back to "bash" if not found. + * + * @returns Full path to bash executable, or "bash" as fallback + */ +export function findBashPath(): string { + // Common bash locations (ordered by preference) + const commonPaths = [ + "/bin/bash", // Most Linux systems + "/usr/bin/bash", // Some Unix systems + "/usr/local/bin/bash", // Homebrew on macOS + ]; + + for (const path of commonPaths) { + if (existsSync(path)) { + return path; + } + } + + // Fallback to "bash" and rely on PATH + return "bash"; +} + +/** + * Find the nice executable path. + * Checks common locations and falls back to "nice" if not found. 
+ * + * @returns Full path to nice executable, or "nice" as fallback + */ +export function findNicePath(): string { + // Common nice locations (ordered by preference) + const commonPaths = [ + "/usr/bin/nice", // Most Linux systems + "/bin/nice", // Some Unix systems + "/usr/local/bin/nice", // Homebrew on macOS + ]; + + for (const path of commonPaths) { + if (existsSync(path)) { + return path; + } + } + + // Fallback to "nice" and rely on PATH + return "nice"; +} diff --git a/src/runtime/initHook.test.ts b/src/runtime/initHook.test.ts new file mode 100644 index 0000000000..d591678d45 --- /dev/null +++ b/src/runtime/initHook.test.ts @@ -0,0 +1,111 @@ +import { describe, it, expect } from "bun:test"; +import { LineBuffer, createLineBufferedLoggers } from "./initHook"; +import type { InitLogger } from "./Runtime"; + +describe("LineBuffer", () => { + it("should buffer incomplete lines", () => { + const lines: string[] = []; + const buffer = new LineBuffer((line) => lines.push(line)); + + buffer.append("hello "); + expect(lines).toEqual([]); + + buffer.append("world\n"); + expect(lines).toEqual(["hello world"]); + }); + + it("should handle multiple lines in one chunk", () => { + const lines: string[] = []; + const buffer = new LineBuffer((line) => lines.push(line)); + + buffer.append("line1\nline2\nline3\n"); + expect(lines).toEqual(["line1", "line2", "line3"]); + }); + + it("should handle incomplete line at end", () => { + const lines: string[] = []; + const buffer = new LineBuffer((line) => lines.push(line)); + + buffer.append("line1\nline2\nincomplete"); + expect(lines).toEqual(["line1", "line2"]); + + buffer.flush(); + expect(lines).toEqual(["line1", "line2", "incomplete"]); + }); + + it("should skip empty lines", () => { + const lines: string[] = []; + const buffer = new LineBuffer((line) => lines.push(line)); + + buffer.append("\nline1\n\nline2\n\n"); + expect(lines).toEqual(["line1", "line2"]); + }); + + it("should handle flush with no buffered data", () => { + 
const lines: string[] = []; + const buffer = new LineBuffer((line) => lines.push(line)); + + buffer.append("line1\n"); + expect(lines).toEqual(["line1"]); + + buffer.flush(); + expect(lines).toEqual(["line1"]); // No change + }); +}); + +describe("createLineBufferedLoggers", () => { + it("should create separate buffers for stdout and stderr", () => { + const stdoutLines: string[] = []; + const stderrLines: string[] = []; + + const mockLogger: InitLogger = { + logStep: () => { + /* no-op for test */ + }, + logStdout: (line) => stdoutLines.push(line), + logStderr: (line) => stderrLines.push(line), + logComplete: () => { + /* no-op for test */ + }, + }; + + const loggers = createLineBufferedLoggers(mockLogger); + + loggers.stdout.append("out1\nout2\n"); + loggers.stderr.append("err1\nerr2\n"); + + expect(stdoutLines).toEqual(["out1", "out2"]); + expect(stderrLines).toEqual(["err1", "err2"]); + }); + + it("should handle incomplete lines and flush separately", () => { + const stdoutLines: string[] = []; + const stderrLines: string[] = []; + + const mockLogger: InitLogger = { + logStep: () => { + /* no-op for test */ + }, + logStdout: (line) => stdoutLines.push(line), + logStderr: (line) => stderrLines.push(line), + logComplete: () => { + /* no-op for test */ + }, + }; + + const loggers = createLineBufferedLoggers(mockLogger); + + loggers.stdout.append("incomplete"); + loggers.stderr.append("also incomplete"); + + expect(stdoutLines).toEqual([]); + expect(stderrLines).toEqual([]); + + loggers.stdout.flush(); + expect(stdoutLines).toEqual(["incomplete"]); + expect(stderrLines).toEqual([]); // stderr not flushed yet + + loggers.stderr.flush(); + expect(stderrLines).toEqual(["also incomplete"]); + }); +}); diff --git a/src/runtime/initHook.ts b/src/runtime/initHook.ts new file mode 100644 index 0000000000..401b71f009 --- /dev/null +++ b/src/runtime/initHook.ts @@ -0,0 +1,82 @@ +import * as fs from "fs"; +import * as fsPromises from "fs/promises"; +import * as path from 
"path"; +import type { InitLogger } from "./Runtime"; + +/** + * Check if .cmux/init hook exists and is executable + * @param projectPath - Path to the project root + * @returns true if hook exists and is executable, false otherwise + */ +export async function checkInitHookExists(projectPath: string): Promise { + const hookPath = path.join(projectPath, ".cmux", "init"); + + try { + await fsPromises.access(hookPath, fs.constants.X_OK); + return true; + } catch { + return false; + } +} + +/** + * Get the init hook path for a project + */ +export function getInitHookPath(projectPath: string): string { + return path.join(projectPath, ".cmux", "init"); +} + +/** + * Line-buffered logger that splits stream output into lines and logs them + * Handles incomplete lines by buffering until a newline is received + */ +export class LineBuffer { + private buffer = ""; + private readonly logLine: (line: string) => void; + + constructor(logLine: (line: string) => void) { + this.logLine = logLine; + } + + /** + * Process a chunk of data, splitting on newlines and logging complete lines + */ + append(data: string): void { + this.buffer += data; + const lines = this.buffer.split("\n"); + this.buffer = lines.pop() ?? 
""; // Keep last incomplete line + for (const line of lines) { + if (line) this.logLine(line); + } + } + + /** + * Flush any remaining buffered data (called when stream closes) + */ + flush(): void { + if (this.buffer) { + this.logLine(this.buffer); + this.buffer = ""; + } + } +} + +/** + * Create line-buffered loggers for stdout and stderr + * Returns an object with append and flush methods for each stream + */ +export function createLineBufferedLoggers(initLogger: InitLogger) { + const stdoutBuffer = new LineBuffer((line) => initLogger.logStdout(line)); + const stderrBuffer = new LineBuffer((line) => initLogger.logStderr(line)); + + return { + stdout: { + append: (data: string) => stdoutBuffer.append(data), + flush: () => stdoutBuffer.flush(), + }, + stderr: { + append: (data: string) => stderrBuffer.append(data), + flush: () => stderrBuffer.flush(), + }, + }; +} diff --git a/src/runtime/runtimeFactory.ts b/src/runtime/runtimeFactory.ts new file mode 100644 index 0000000000..33de00a37e --- /dev/null +++ b/src/runtime/runtimeFactory.ts @@ -0,0 +1,27 @@ +import type { Runtime } from "./Runtime"; +import { LocalRuntime } from "./LocalRuntime"; +import { SSHRuntime } from "./SSHRuntime"; +import type { RuntimeConfig } from "@/types/runtime"; + +/** + * Create a Runtime instance based on the configuration + */ +export function createRuntime(config: RuntimeConfig): Runtime { + switch (config.type) { + case "local": + return new LocalRuntime(config.srcBaseDir); + + case "ssh": + return new SSHRuntime({ + host: config.host, + srcBaseDir: config.srcBaseDir, + identityFile: config.identityFile, + port: config.port, + }); + + default: { + const unknownConfig = config as { type?: string }; + throw new Error(`Unknown runtime type: ${unknownConfig.type ?? 
"undefined"}`); + } + } +} diff --git a/src/runtime/streamProcess.ts b/src/runtime/streamProcess.ts new file mode 100644 index 0000000000..e2ca0eeecf --- /dev/null +++ b/src/runtime/streamProcess.ts @@ -0,0 +1,68 @@ +/** + * Helper utilities for streaming child process output to InitLogger + */ + +import type { ChildProcess } from "child_process"; +import type { InitLogger } from "./Runtime"; + +/** + * Stream child process stdout/stderr to initLogger + * Prevents pipe buffer overflow by draining both streams. + * + * This is essential to prevent child processes from hanging when their + * output buffers fill up (typically 64KB). Always call this when spawning + * processes that may produce output. + * + * @param process Child process to stream from + * @param initLogger Logger to stream output to + * @param options Configuration for which streams to log + */ +export function streamProcessToLogger( + process: ChildProcess, + initLogger: InitLogger, + options?: { + /** If true, log stdout via logStdout. If false, drain silently. Default: false */ + logStdout?: boolean; + /** If true, log stderr via logStderr. If false, drain silently. Default: true */ + logStderr?: boolean; + /** Optional: Command string to log before streaming starts */ + command?: string; + } +): void { + const { logStdout = false, logStderr = true, command } = options ?? 
{}; + + // Log the command being executed (if provided) + if (command) { + initLogger.logStep(`Executing: ${command}`); + } + + // Drain stdout (prevent pipe overflow) + if (process.stdout) { + process.stdout.on("data", (data: Buffer) => { + if (logStdout) { + const output = data.toString(); + // Split by lines and log each non-empty line + const lines = output.split("\n").filter((line) => line.trim().length > 0); + for (const line of lines) { + initLogger.logStdout(line); + } + } + // Otherwise drain silently to prevent buffer overflow + }); + } + + // Stream stderr to logger + if (process.stderr) { + process.stderr.on("data", (data: Buffer) => { + if (logStderr) { + const output = data.toString(); + // Split by lines and log each non-empty line + const lines = output.split("\n").filter((line) => line.trim().length > 0); + for (const line of lines) { + initLogger.logStderr(line); + } + } + // Otherwise drain silently to prevent buffer overflow + }); + } +} diff --git a/src/runtime/tildeExpansion.ts b/src/runtime/tildeExpansion.ts new file mode 100644 index 0000000000..be37c08106 --- /dev/null +++ b/src/runtime/tildeExpansion.ts @@ -0,0 +1,60 @@ +/** + * Utilities for handling tilde path expansion in SSH commands + * + * When running commands over SSH, tilde paths need special handling: + * - Quoted tildes won't expand: `cd '~'` fails, but `cd "$HOME"` works + * - Must escape special shell characters when using $HOME expansion + */ + +/** + * Expand tilde path to $HOME-based path for use in SSH commands. + * + * Converts: + * - "~" → "$HOME" + * - "~/path" → "$HOME/path" + * - "/abs/path" → quoted absolute path (no expansion) + * + * The result is safe to use in bash commands and will properly expand at runtime. + * Special characters in paths are escaped for use inside double quotes. 
+ * + * @param path - Path that may contain tilde prefix + * @returns Bash-safe string ready to use in commands + * + * @example + * expandTildeForSSH("~") // => "$HOME" + * expandTildeForSSH("~/workspace") // => "$HOME/workspace" + * expandTildeForSSH("/abs/path") // => '"/abs/path"' + */ +export function expandTildeForSSH(path: string): string { + if (path === "~") { + return '"$HOME"'; + } else if (path.startsWith("~/")) { + const pathAfterTilde = path.slice(2); + // Escape special chars for use inside double quotes + const escaped = pathAfterTilde + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"') + .replace(/\$/g, "\\$") + .replace(/`/g, "\\`"); + return `"$HOME/${escaped}"`; + } else { + // No tilde - quote the path as-is + // Note: We use double quotes to allow variable expansion if needed + return `"${path.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\$/g, "\\$").replace(/`/g, "\\`")}"`; + } +} + +/** + * Generate a cd command for use in SSH exec, handling tilde paths correctly. 
+ * + * @param path - Working directory path (may contain tilde) + * @returns Bash command string like `cd "$HOME/path"` + * + * @example + * cdCommandForSSH("~") // => 'cd "$HOME"' + * cdCommandForSSH("~/workspace") // => 'cd "$HOME/workspace"' + * cdCommandForSSH("/abs/path") // => 'cd "/abs/path"' + */ +export function cdCommandForSSH(path: string): string { + return `cd ${expandTildeForSSH(path)}`; +} diff --git a/src/services/agentSession.ts b/src/services/agentSession.ts index 27ccadc237..ed2d345475 100644 --- a/src/services/agentSession.ts +++ b/src/services/agentSession.ts @@ -15,6 +15,7 @@ import type { Result } from "@/types/result"; import { Ok, Err } from "@/types/result"; import { enforceThinkingPolicy } from "@/utils/thinking/policy"; import { loadTokenizerForModel } from "@/utils/main/tokenizer"; +import { createRuntime } from "@/runtime/runtimeFactory"; interface ImagePart { url: string; @@ -180,7 +181,10 @@ export class AgentSession { // Metadata already exists, verify workspace path matches const metadata = existing.data; // Directory name uses workspace name (not stable ID) - const expectedPath = this.config.getWorkspacePath(metadata.projectPath, metadata.name); + const runtime = createRuntime( + metadata.runtimeConfig ?? 
{ type: "local", srcBaseDir: this.config.srcDir } + ); + const expectedPath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); assert( expectedPath === normalizedWorkspacePath, `Existing metadata workspace path mismatch for ${this.workspaceId}: expected ${expectedPath}, got ${normalizedWorkspacePath}` diff --git a/src/services/aiService.ts b/src/services/aiService.ts index 6ec1017f55..98e94e2e0d 100644 --- a/src/services/aiService.ts +++ b/src/services/aiService.ts @@ -13,6 +13,7 @@ import type { Config } from "@/config"; import { StreamManager } from "./streamManager"; import type { SendMessageError } from "@/types/errors"; import { getToolsForModel } from "@/utils/tools/tools"; +import { createRuntime } from "@/runtime/runtimeFactory"; import { secretsToRecord } from "@/types/secrets"; import type { CmuxProviderOptions } from "@/types/providerOptions"; import { log } from "./log"; @@ -97,6 +98,7 @@ if (typeof globalFetchWithExtras.certificate === "function") { * In tests, we preload them once during setup to ensure reliable concurrent execution. 
*/ export async function preloadAISDKProviders(): Promise { + // Preload providers to ensure they're in the module cache before concurrent tests run await Promise.all([import("@ai-sdk/anthropic"), import("@ai-sdk/openai")]); } @@ -419,8 +421,10 @@ export class AIService extends EventEmitter { const [providerName] = modelString.split(":"); // Get tool names early for mode transition sentinel (stub config, no workspace context needed) + const earlyRuntime = createRuntime({ type: "local", srcBaseDir: process.cwd() }); const earlyAllTools = await getToolsForModel(modelString, { cwd: process.cwd(), + runtime: earlyRuntime, tempDir: os.tmpdir(), secrets: {}, }); @@ -496,7 +500,10 @@ export class AIService extends EventEmitter { } // Get workspace path (directory name uses workspace name) - const workspacePath = this.config.getWorkspacePath(metadata.projectPath, metadata.name); + const runtime = createRuntime( + metadata.runtimeConfig ?? { type: "local", srcBaseDir: this.config.srcDir } + ); + const workspacePath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); // Build system message from workspace metadata const systemMessage = await buildSystemMessage( @@ -517,9 +524,10 @@ export class AIService extends EventEmitter { const streamToken = this.streamManager.generateStreamToken(); const tempDir = this.streamManager.createTempDirForStream(streamToken); - // Get model-specific tools with workspace path configuration and secrets + // Get model-specific tools with workspace path (correct for local or remote) const allTools = await getToolsForModel(modelString, { cwd: workspacePath, + runtime, secrets: secretsToRecord(projectSecrets), tempDir, }); diff --git a/src/services/gitService.test.ts b/src/services/gitService.test.ts index fee53d3ed6..237b68989b 100644 --- a/src/services/gitService.test.ts +++ b/src/services/gitService.test.ts @@ -25,6 +25,7 @@ async function createTestRepo(basePath: string): Promise { // Mock config for createWorktree const mockConfig 
= { + srcDir: path.join(__dirname, "..", "test-workspaces"), getWorkspacePath: (projectPath: string, branchName: string) => { return path.join(path.dirname(projectPath), "workspaces", branchName); }, @@ -54,6 +55,9 @@ describe("removeWorktreeSafe", () => { const result = await createWorktree(mockConfig, repoPath, "test-branch", { trunkBranch: defaultBranch, }); + if (!result.success) { + console.error("createWorktree failed:", result.error); + } expect(result.success).toBe(true); const worktreePath = result.path!; diff --git a/src/services/ipcMain.ts b/src/services/ipcMain.ts index acd43e4e13..d32fc4f2ed 100644 --- a/src/services/ipcMain.ts +++ b/src/services/ipcMain.ts @@ -9,10 +9,9 @@ import { createWorktree, listLocalBranches, detectDefaultTrunkBranch, - getMainWorktreeFromWorktree, getCurrentBranch, } from "@/git"; -import { removeWorktreeSafe, removeWorktree, pruneWorktrees } from "@/services/gitService"; +import { removeWorktree, pruneWorktrees } from "@/services/gitService"; import { AIService } from "@/services/aiService"; import { HistoryService } from "@/services/historyService"; import { PartialService } from "@/services/partialService"; @@ -31,7 +30,8 @@ import { secretsToRecord } from "@/types/secrets"; import { DisposableTempDir } from "@/services/tempDir"; import { BashExecutionService } from "@/services/bashExecutionService"; import { InitStateManager } from "@/services/initStateManager"; - +import { createRuntime } from "@/runtime/runtimeFactory"; +import type { RuntimeConfig } from "@/types/runtime"; /** * IpcMain - Manages all IPC handlers and service coordination * @@ -256,7 +256,13 @@ export class IpcMain { private registerWorkspaceHandlers(ipcMain: ElectronIpcMain): void { ipcMain.handle( IPC_CHANNELS.WORKSPACE_CREATE, - async (_event, projectPath: string, branchName: string, trunkBranch: string) => { + async ( + _event, + projectPath: string, + branchName: string, + trunkBranch: string, + runtimeConfig?: RuntimeConfig + ) => { // Validate 
workspace name const validation = validateWorkspaceName(branchName); if (!validation.valid) { @@ -272,75 +278,117 @@ export class IpcMain { // Generate stable workspace ID (stored in config, not used for directory name) const workspaceId = this.config.generateStableId(); - // Create the git worktree with the workspace name as directory name - const result = await createWorktree(this.config, projectPath, branchName, { + // Create runtime for workspace creation (defaults to local with srcDir as base) + const finalRuntimeConfig: RuntimeConfig = runtimeConfig ?? { + type: "local", + srcBaseDir: this.config.srcDir, + }; + const runtime = createRuntime(finalRuntimeConfig); + + // Create session BEFORE starting init so events can be forwarded + const session = this.getOrCreateSession(workspaceId); + + // Start init tracking (creates in-memory state + emits init-start event) + // This MUST complete before workspace creation returns so replayInit() finds state + this.initStateManager.startInit(workspaceId, projectPath); + + // Create InitLogger that bridges to InitStateManager + const initLogger = { + logStep: (message: string) => { + this.initStateManager.appendOutput(workspaceId, message, false); + }, + logStdout: (line: string) => { + this.initStateManager.appendOutput(workspaceId, line, false); + }, + logStderr: (line: string) => { + this.initStateManager.appendOutput(workspaceId, line, true); + }, + logComplete: (exitCode: number) => { + void this.initStateManager.endInit(workspaceId, exitCode); + }, + }; + + // Phase 1: Create workspace structure (FAST - returns immediately) + const createResult = await runtime.createWorkspace({ + projectPath, + branchName, trunkBranch: normalizedTrunkBranch, - directoryName: branchName, + directoryName: branchName, // Use branch name as directory name + initLogger, }); - if (result.success && result.path) { - const projectName = - projectPath.split("/").pop() ?? projectPath.split("\\").pop() ?? 
"unknown"; + if (!createResult.success || !createResult.workspacePath) { + return { success: false, error: createResult.error ?? "Failed to create workspace" }; + } - // Initialize workspace metadata with stable ID and name - const metadata = { - id: workspaceId, - name: branchName, // Name is separate from ID - projectName, - projectPath, // Full project path for computing worktree path - createdAt: new Date().toISOString(), - }; - // Note: metadata.json no longer written - config is the only source of truth + const projectName = + projectPath.split("/").pop() ?? projectPath.split("\\").pop() ?? "unknown"; - // Update config to include the new workspace (with full metadata) - this.config.editConfig((config) => { - let projectConfig = config.projects.get(projectPath); - if (!projectConfig) { - // Create project config if it doesn't exist - projectConfig = { - workspaces: [], - }; - config.projects.set(projectPath, projectConfig); - } - // Add workspace to project config with full metadata - projectConfig.workspaces.push({ - path: result.path!, - id: workspaceId, - name: branchName, - createdAt: metadata.createdAt, - }); - return config; + // Initialize workspace metadata with stable ID and name + const metadata = { + id: workspaceId, + name: branchName, // Name is separate from ID + projectName, + projectPath, // Full project path for computing worktree path + createdAt: new Date().toISOString(), + }; + // Note: metadata.json no longer written - config is the only source of truth + + // Update config to include the new workspace (with full metadata) + this.config.editConfig((config) => { + let projectConfig = config.projects.get(projectPath); + if (!projectConfig) { + // Create project config if it doesn't exist + projectConfig = { + workspaces: [], + }; + config.projects.set(projectPath, projectConfig); + } + // Add workspace to project config with full metadata + projectConfig.workspaces.push({ + path: createResult.workspacePath!, + id: workspaceId, + name: 
branchName, + createdAt: metadata.createdAt, + runtimeConfig: finalRuntimeConfig, // Save runtime config for exec operations }); + return config; + }); - // No longer creating symlinks - directory name IS the workspace name + // No longer creating symlinks - directory name IS the workspace name - // Get complete metadata from config (includes paths) - const allMetadata = this.config.getAllWorkspaceMetadata(); - const completeMetadata = allMetadata.find((m) => m.id === workspaceId); - if (!completeMetadata) { - return { success: false, error: "Failed to retrieve workspace metadata" }; - } + // Get complete metadata from config (includes paths) + const allMetadata = this.config.getAllWorkspaceMetadata(); + const completeMetadata = allMetadata.find((m) => m.id === workspaceId); + if (!completeMetadata) { + return { success: false, error: "Failed to retrieve workspace metadata" }; + } - // Emit metadata event for new workspace - const session = this.getOrCreateSession(workspaceId); - session.emitMetadata(completeMetadata); + // Emit metadata event for new workspace (session already created above) + session.emitMetadata(completeMetadata); - // Start optional .cmux/init hook (waits for state creation, then returns) - // This ensures replayInit() will find state when frontend subscribes - await this.startWorkspaceInitHook({ + // Phase 2: Initialize workspace asynchronously (SLOW - runs in background) + // This streams progress via initLogger and doesn't block the IPC return + void runtime + .initWorkspace({ projectPath, - worktreePath: result.path, - workspaceId, + branchName, + trunkBranch: normalizedTrunkBranch, + workspacePath: createResult.workspacePath, + initLogger, + }) + .catch((error: unknown) => { + const errorMsg = error instanceof Error ? 
error.message : String(error); + log.error(`initWorkspace failed for ${workspaceId}:`, error); + initLogger.logStderr(`Initialization failed: ${errorMsg}`); + initLogger.logComplete(-1); }); - // Return complete metadata with paths for frontend - return { - success: true, - metadata: completeMetadata, - }; - } - - return { success: false, error: result.error ?? "Failed to create workspace" }; + // Return immediately - init streams separately via initLogger events + return { + success: true, + metadata: completeMetadata, + }; } ); @@ -353,7 +401,7 @@ export class IpcMain { ipcMain.handle( IPC_CHANNELS.WORKSPACE_RENAME, - (_event, workspaceId: string, newName: string) => { + async (_event, workspaceId: string, newName: string) => { try { // Block rename during active streaming to prevent race conditions // (bash processes would have stale cwd, system message would be wrong) @@ -396,27 +444,24 @@ export class IpcMain { if (!workspace) { return Err("Failed to find workspace in config"); } - const { projectPath, workspacePath } = workspace; + const { projectPath } = workspace; - // Compute new path (based on name) - const oldPath = workspacePath; - const newPath = this.config.getWorkspacePath(projectPath, newName); + // Create runtime instance for this workspace + // For local runtimes, workdir should be srcDir, not the individual workspace path + const runtime = createRuntime( + oldMetadata.runtimeConfig ?? { type: "local", srcBaseDir: this.config.srcDir } + ); - // Use git worktree move to rename the worktree directory - // This updates git's internal worktree metadata correctly - try { - const result = spawnSync("git", ["worktree", "move", oldPath, newPath], { - cwd: projectPath, - }); - if (result.status !== 0) { - const stderr = result.stderr?.toString() || "Unknown error"; - return Err(`Failed to move worktree: ${stderr}`); - } - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - return Err(`Failed to move worktree: ${message}`); + // Delegate rename to runtime (handles both local and SSH) + // Runtime computes workspace paths internally from workdir + projectPath + workspace names + const renameResult = await runtime.renameWorkspace(projectPath, oldName, newName); + + if (!renameResult.success) { + return Err(renameResult.error); } + const { oldPath, newPath } = renameResult; + // Update config with new name and path this.config.editConfig((config) => { const projectConfig = config.projects.get(projectPath); @@ -425,6 +470,10 @@ export class IpcMain { if (workspaceEntry) { workspaceEntry.name = newName; workspaceEntry.path = newPath; // Update path to reflect new directory name + + // Note: We don't need to update runtimeConfig.srcBaseDir on rename + // because srcBaseDir is the base directory, not the individual workspace path + // The workspace path is computed dynamically via runtime.getWorkspacePath() } } return config; @@ -483,8 +532,11 @@ export class IpcMain { const sourceMetadata = sourceMetadataResult.data; const foundProjectPath = sourceMetadata.projectPath; - // Compute source workspace path from metadata (use name for directory lookup) - const sourceWorkspacePath = this.config.getWorkspacePath( + // Compute source workspace path from metadata (use name for directory lookup) using Runtime + const sourceRuntime = createRuntime( + sourceMetadata.runtimeConfig ?? 
{ type: "local", srcBaseDir: this.config.srcDir } + ); + const sourceWorkspacePath = sourceRuntime.getWorkspacePath( foundProjectPath, sourceMetadata.name ); @@ -826,19 +878,26 @@ export class IpcMain { return Err(`Workspace ${workspaceId} not found in config`); } - // Get workspace path (directory name uses workspace name) - const namedPath = this.config.getWorkspacePath(metadata.projectPath, metadata.name); - // Load project secrets const projectSecrets = this.config.getProjectSecrets(metadata.projectPath); // Create scoped temp directory for this IPC call using tempDir = new DisposableTempDir("cmux-ipc-bash"); + // Create runtime and compute workspace path + // Runtime owns the path computation logic + const runtimeConfig = metadata.runtimeConfig ?? { + type: "local" as const, + srcBaseDir: this.config.srcDir, + }; + const runtime = createRuntime(runtimeConfig); + const workspacePath = runtime.getWorkspacePath(metadata.projectPath, metadata.name); + // Create bash tool with workspace's cwd and secrets // All IPC bash calls are from UI (background operations) - use truncate to avoid temp file spam const bashTool = createBashTool({ - cwd: namedPath, + cwd: workspacePath, // Bash executes in the workspace directory + runtime, secrets: secretsToRecord(projectSecrets), niceness: options?.niceness, tempDir: tempDir.path, @@ -977,75 +1036,49 @@ export class IpcMain { log.info(`Workspace ${workspaceId} metadata not found, considering removal successful`); return { success: true }; } + const metadata = metadataResult.data; - // Get actual workspace path from config (handles both legacy and new format) + // Get workspace from config to get projectPath const workspace = this.config.findWorkspace(workspaceId); if (!workspace) { log.info(`Workspace ${workspaceId} metadata exists but not found in config`); return { success: true }; // Consider it already removed } - const workspacePath = workspace.workspacePath; - - // Get project path from the worktree itself - const 
foundProjectPath = await getMainWorktreeFromWorktree(workspacePath); - - // Remove git worktree if we found the project path - if (foundProjectPath) { - const worktreeExists = await fsPromises - .access(workspacePath) - .then(() => true) - .catch(() => false); - - if (worktreeExists) { - // Use optimized removal unless force is explicitly requested - let gitResult: Awaited>; - - if (options.force) { - // Force deletion: Use git worktree remove --force directly - gitResult = await removeWorktree(foundProjectPath, workspacePath, { force: true }); - } else { - // Normal deletion: Use optimized rename-then-delete strategy - gitResult = await removeWorktreeSafe(foundProjectPath, workspacePath, { - onBackgroundDelete: (tempDir, error) => { - if (error) { - log.info( - `Background deletion failed for ${tempDir}: ${error.message ?? "unknown error"}` - ); - } - }, - }); - } - - if (!gitResult.success) { - const errorMessage = gitResult.error ?? "Unknown error"; - const normalizedError = errorMessage.toLowerCase(); - const looksLikeMissingWorktree = - normalizedError.includes("not a working tree") || - normalizedError.includes("does not exist") || - normalizedError.includes("no such file"); - - if (looksLikeMissingWorktree) { - const pruneResult = await pruneWorktrees(foundProjectPath); - if (!pruneResult.success) { - log.info( - `Failed to prune stale worktrees for ${foundProjectPath} after removeWorktree error: ${ - pruneResult.error ?? "unknown error" - }` - ); - } - } else { - return gitResult; + const { projectPath, workspacePath } = workspace; + + // Create runtime instance for this workspace + // For local runtimes, workdir should be srcDir, not the individual workspace path + const runtime = createRuntime( + metadata.runtimeConfig ?? 
{ type: "local", srcBaseDir: this.config.srcDir } + ); + + // Delegate deletion to runtime - it handles all path computation and existence checks + const deleteResult = await runtime.deleteWorkspace(projectPath, metadata.name, options.force); + + if (!deleteResult.success) { + const errorMessage = deleteResult.error; + const normalizedError = errorMessage.toLowerCase(); + const looksLikeMissingWorktree = + normalizedError.includes("not a working tree") || + normalizedError.includes("does not exist") || + normalizedError.includes("no such file"); + + if (looksLikeMissingWorktree) { + // Worktree is already gone or stale - prune git records if this is a local worktree + if (metadata.runtimeConfig?.type !== "ssh") { + const pruneResult = await pruneWorktrees(projectPath); + if (!pruneResult.success) { + log.info( + `Failed to prune stale worktrees for ${projectPath} after deleteWorkspace error: ${ + pruneResult.error ?? "unknown error" + }` + ); } } + // Treat missing workspace as success (idempotent operation) } else { - const pruneResult = await pruneWorktrees(foundProjectPath); - if (!pruneResult.success) { - log.info( - `Failed to prune stale worktrees for ${foundProjectPath} after detecting missing workspace at ${workspacePath}: ${ - pruneResult.error ?? 
"unknown error" - }` - ); - } + // Real error (e.g., dirty workspace without force) - return it + return { success: false, error: deleteResult.error }; } } @@ -1055,11 +1088,7 @@ export class IpcMain { return { success: false, error: aiResult.error }; } - // No longer need to remove symlinks (directory IS the workspace name) - // Update config to remove the workspace from all projects - // We iterate through all projects instead of relying on foundProjectPath - // because the worktree might be deleted (so getMainWorktreeFromWorktree fails) const projectsConfig = this.config.loadConfigOrDefault(); let configUpdated = false; for (const [_projectPath, projectConfig] of projectsConfig.projects.entries()) { diff --git a/src/services/tools/bash.test.ts b/src/services/tools/bash.test.ts index 28145126c1..fc58458e68 100644 --- a/src/services/tools/bash.test.ts +++ b/src/services/tools/bash.test.ts @@ -4,6 +4,7 @@ import type { BashToolArgs, BashToolResult } from "@/types/tools"; import { BASH_MAX_TOTAL_BYTES } from "@/constants/toolLimits"; import * as fs from "fs"; import { TestTempDir } from "./testHelpers"; +import { createRuntime } from "@/runtime/runtimeFactory"; import type { ToolCallOptions } from "ai"; @@ -20,6 +21,7 @@ function createTestBashTool(options?: { niceness?: number }) { const tempDir = new TestTempDir("test-bash"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, ...options, }); @@ -161,6 +163,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-truncate"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, overflow_policy: "truncate", }); @@ -199,6 +202,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-overlong-line"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" 
}), tempDir: tempDir.path, overflow_policy: "truncate", }); @@ -230,6 +234,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-boundary"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, overflow_policy: "truncate", }); @@ -265,6 +270,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-default"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, // overflow_policy not specified - should default to tmpfile }); @@ -296,6 +302,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-100kb"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -347,6 +354,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-100kb-limit"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -389,6 +397,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-no-kill-display"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -430,6 +439,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-per-line-kill"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -469,6 +479,7 @@ describe("bash tool", () => { const tempDir = new TestTempDir("test-bash-under-limit"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -498,6 +509,7 @@ describe("bash tool", () => { const tempDir = new 
TestTempDir("test-bash-exact-limit"); const tool = createBashTool({ cwd: process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -568,7 +580,7 @@ describe("bash tool", () => { expect(result.success).toBe(false); if (!result.success) { - expect(result.error).toContain("timed out"); + expect(result.error).toContain("timeout"); expect(result.exitCode).toBe(-1); } }); @@ -851,7 +863,7 @@ describe("bash tool", () => { expect(result.success).toBe(false); if (!result.success) { - expect(result.error).toContain("timed out"); + expect(result.error).toContain("timeout"); expect(duration).toBeLessThan(2000); } }); diff --git a/src/services/tools/bash.ts b/src/services/tools/bash.ts index 66a55425bc..23dbcabfa2 100644 --- a/src/services/tools/bash.ts +++ b/src/services/tools/bash.ts @@ -1,9 +1,7 @@ import { tool } from "ai"; -import { spawn } from "child_process"; -import type { ChildProcess } from "child_process"; import { createInterface } from "readline"; import * as path from "path"; -import * as fs from "fs"; +import { Readable } from "stream"; import { BASH_DEFAULT_TIMEOUT_SECS, BASH_HARD_MAX_LINES, @@ -13,48 +11,12 @@ import { BASH_TRUNCATE_MAX_TOTAL_BYTES, BASH_TRUNCATE_MAX_FILE_BYTES, } from "@/constants/toolLimits"; +import { EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT } from "@/constants/exitCodes"; import type { BashToolResult } from "@/types/tools"; import type { ToolConfiguration, ToolFactory } from "@/utils/tools/tools"; import { TOOL_DEFINITIONS } from "@/utils/tools/toolDefinitions"; -/** - * Wraps a ChildProcess to make it disposable for use with `using` statements. - * Always kills the entire process group with SIGKILL to prevent zombie processes. - * SIGKILL cannot be caught or ignored, guaranteeing immediate cleanup. 
- */ -class DisposableProcess implements Disposable { - private disposed = false; - - constructor(private readonly process: ChildProcess) {} - - [Symbol.dispose](): void { - // Prevent double-signalling if dispose is called multiple times - // (e.g., manually via abort/timeout, then automatically via `using`) - if (this.disposed || this.process.pid === undefined) { - return; - } - - this.disposed = true; - - try { - // Kill entire process group with SIGKILL - cannot be caught/ignored - process.kill(-this.process.pid, "SIGKILL"); - } catch { - // Fallback: try killing just the main process - try { - this.process.kill("SIGKILL"); - } catch { - // Process already dead - ignore - } - } - } - - get child(): ChildProcess { - return this.process; - } -} - /** * Bash execution tool factory for AI assistant * Creates a bash tool that can execute commands with a configurable timeout @@ -77,7 +39,7 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { inputSchema: TOOL_DEFINITIONS.bash.schema, execute: async ({ script, timeout_secs }, { abortSignal }): Promise => { // Validate script is not empty - likely indicates a malformed tool call - // eslint-disable-next-line @typescript-eslint/prefer-optional-chain + if (!script || script.trim().length === 0) { return { success: false, @@ -115,12 +77,15 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { let fileTruncated = false; // Hit 100KB file limit // Detect redundant cd to working directory - // Match patterns like: "cd /path &&", "cd /path;", "cd '/path' &&", "cd \"/path\" &&" - const cdPattern = /^\s*cd\s+['"]?([^'";&|]+)['"]?\s*[;&|]/; + // Note: config.cwd is the actual execution path (local for LocalRuntime, remote for SSHRuntime) + // Match patterns like: "cd /path &&", "cd /path;", "cd '/path' &&", "cd "/path" &&" + const cdPattern = /^\s*cd\s+['"]?([^'";\\&|]+)['"]?\s*[;&|]/; const match = cdPattern.exec(script); if (match) { const targetPath = match[1].trim(); - // 
Normalize paths for comparison (resolve to absolute) + // For SSH runtime, config.cwd might use $HOME - need to handle this + // Normalize paths for comparison (resolve to absolute where possible) + // Note: This check is best-effort - it won't catch all cases on SSH (e.g., ~/path vs $HOME/path) const normalizedTarget = path.resolve(config.cwd, targetPath); const normalizedCwd = path.resolve(config.cwd); @@ -134,44 +99,17 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { } } - // Create the process with `using` for automatic cleanup - // If niceness is specified, spawn nice directly to avoid escaping issues - const spawnCommand = config.niceness !== undefined ? "nice" : "bash"; - const spawnArgs = - config.niceness !== undefined - ? ["-n", config.niceness.toString(), "bash", "-c", script] - : ["-c", script]; - - using childProcess = new DisposableProcess( - spawn(spawnCommand, spawnArgs, { - cwd: config.cwd, - env: { - ...process.env, - // Inject secrets as environment variables - ...(config.secrets ?? {}), - // Prevent interactive editors from blocking bash execution - // This is critical for git operations like rebase/commit that try to open editors - GIT_EDITOR: "true", // Git-specific editor (highest priority) - GIT_SEQUENCE_EDITOR: "true", // For interactive rebase sequences - EDITOR: "true", // General fallback for non-git commands - VISUAL: "true", // Another common editor environment variable - // Prevent git from prompting for credentials - // This is critical for operations like fetch/pull that might try to authenticate - // Without this, git can hang waiting for user input if credentials aren't configured - GIT_TERMINAL_PROMPT: "0", // Disables git credential prompts - }, - stdio: ["ignore", "pipe", "pipe"], - // CRITICAL: Spawn as detached process group leader to prevent zombie processes. 
- // When a bash script spawns background processes (e.g., `sleep 100 &`), those - // children would normally be reparented to init when bash exits, becoming orphans. - // With detached:true, bash becomes a process group leader, allowing us to kill - // the entire group (including all backgrounded children) via process.kill(-pid). - detached: true, - }) - ); + // Execute using runtime interface (works for both local and SSH) + const execStream = await config.runtime.exec(script, { + cwd: config.cwd, + env: config.secrets, + timeout: effectiveTimeout, + niceness: config.niceness, + abortSignal, + }); // Use a promise to wait for completion - return await new Promise((resolve) => { + return await new Promise((resolve, _reject) => { const lines: string[] = []; let truncated = false; let exitCode: number | null = null; @@ -181,7 +119,6 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { const resolveOnce = (result: BashToolResult) => { if (!resolved) { resolved = true; - clearTimeout(timeoutHandle); // Clean up abort listener if present if (abortSignal && abortListener) { abortSignal.removeEventListener("abort", abortListener); @@ -190,29 +127,107 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { } }; - // Set up abort signal listener - kill process when stream is cancelled + // Set up abort signal listener - cancellation is handled by runtime let abortListener: (() => void) | null = null; if (abortSignal) { abortListener = () => { if (!resolved) { - childProcess[Symbol.dispose](); - // The close event will fire and handle finalization with abort error + // Runtime handles the actual cancellation + // We just need to clean up our side } }; abortSignal.addEventListener("abort", abortListener); } - // Set up timeout - kill process and let close event handle cleanup - const timeoutHandle = setTimeout(() => { - if (!resolved) { - childProcess[Symbol.dispose](); - // The close event will fire and handle finalization 
with timeout error - } - }, effectiveTimeout * 1000); + // Close stdin immediately - we don't need to send any input + // This is critical: not closing stdin can cause the runtime to wait forever + execStream.stdin.close().catch(() => { + // Ignore errors - stream might already be closed + }); + + // Convert Web Streams to Node.js streams for readline + // Type mismatch between Node.js ReadableStream and Web ReadableStream - safe to cast + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any + const stdoutNodeStream = Readable.fromWeb(execStream.stdout as any); + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any + const stderrNodeStream = Readable.fromWeb(execStream.stderr as any); + + // Set up readline for both stdout and stderr to handle buffering + const stdoutReader = createInterface({ input: stdoutNodeStream }); + const stderrReader = createInterface({ input: stderrNodeStream }); + + // Track when streams end + let stdoutEnded = false; + let stderrEnded = false; + + // Forward-declare functions that will be defined below + // eslint-disable-next-line prefer-const + let tryFinalize: () => void; + // eslint-disable-next-line prefer-const + let finalize: () => void; + + // Helper to tear down streams and readline interfaces + const teardown = () => { + stdoutReader.close(); + stderrReader.close(); + stdoutNodeStream.destroy(); + stderrNodeStream.destroy(); + }; - // Set up readline for both stdout and stderr to handle line buffering - const stdoutReader = createInterface({ input: childProcess.child.stdout! }); - const stderrReader = createInterface({ input: childProcess.child.stderr! 
}); + // IMPORTANT: Attach exit handler IMMEDIATELY to prevent unhandled rejection + // Handle both normal exits and special error codes (EXIT_CODE_ABORTED, EXIT_CODE_TIMEOUT) + execStream.exitCode + .then((code) => { + exitCode = code; + + // Check for special error codes from runtime + if (code === EXIT_CODE_ABORTED) { + // Aborted via AbortSignal + teardown(); + resolveOnce({ + success: false, + error: "Command execution was aborted", + exitCode: -1, + wall_duration_ms: Math.round(performance.now() - startTime), + }); + return; + } + + if (code === EXIT_CODE_TIMEOUT) { + // Exceeded timeout + teardown(); + resolveOnce({ + success: false, + error: `Command exceeded timeout of ${effectiveTimeout} seconds`, + exitCode: -1, + wall_duration_ms: Math.round(performance.now() - startTime), + }); + return; + } + + // Normal exit - try to finalize if streams have already closed + tryFinalize(); + // Set a grace period - if streams don't close within 50ms, force finalize + setTimeout(() => { + if (!resolved && exitCode !== null) { + stdoutNodeStream.destroy(); + stderrNodeStream.destroy(); + stdoutEnded = true; + stderrEnded = true; + tryFinalize(); + } + }, 50); + }) + .catch((err: Error) => { + // Only actual errors (like spawn failure) should reach here now + teardown(); + resolveOnce({ + success: false, + error: `Failed to execute command: ${err.message}`, + exitCode: -1, + wall_duration_ms: Math.round(performance.now() - startTime), + }); + }); // Helper to trigger display truncation (stop showing to agent, keep collecting) const triggerDisplayTruncation = (reason: string) => { @@ -222,7 +237,7 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { // Don't kill process yet - keep collecting up to file limit }; - // Helper to trigger file truncation (stop collecting, kill process) + // Helper to trigger file truncation (stop collecting, close streams) const triggerFileTruncation = (reason: string) => { fileTruncated = true; displayTruncated = 
true; @@ -230,7 +245,11 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { overflowReason = reason; stdoutReader.close(); stderrReader.close(); - childProcess[Symbol.dispose](); + // Cancel the streams to stop the process + // eslint-disable-next-line @typescript-eslint/no-empty-function + execStream.stdout.cancel().catch(() => {}); + // eslint-disable-next-line @typescript-eslint/no-empty-function + execStream.stderr.cancel().catch(() => {}); }; stdoutReader.on("line", (line) => { @@ -319,7 +338,15 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { } }); - // Track when streams end + // Define tryFinalize (already declared above) + tryFinalize = () => { + if (resolved) return; + // Only finalize when both streams have closed and we have an exit code + if (stdoutEnded && stderrEnded && exitCode !== null) { + finalize(); + } + }; + stdoutReader.on("close", () => { stdoutEnded = true; tryFinalize(); @@ -330,46 +357,8 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { tryFinalize(); }); - // Use 'exit' event instead of 'close' to handle background processes correctly. - // The 'close' event waits for ALL child processes (including background ones) to exit, - // which causes hangs when users spawn background processes like servers. - // The 'exit' event fires when the main bash process exits, which is what we want. 
- let stdoutEnded = false; - let stderrEnded = false; - let processExited = false; - - const handleExit = (code: number | null) => { - processExited = true; - exitCode = code; - // Try to finalize immediately if streams have ended - tryFinalize(); - // Set a grace period timer - if streams don't end within 50ms, finalize anyway - // This handles background processes that keep stdio open - setTimeout(() => { - if (!resolved && processExited) { - // Forcibly destroy streams to ensure they close - childProcess.child.stdout?.destroy(); - childProcess.child.stderr?.destroy(); - stdoutEnded = true; - stderrEnded = true; - finalize(); - } - }, 50); - }; - - const tryFinalize = () => { - if (resolved) return; - // Finalize if process exited AND (both streams ended OR 100ms grace period passed) - if (!processExited) return; - - // If we've already collected output, finalize immediately - // Otherwise wait a bit for streams to flush - if (stdoutEnded && stderrEnded) { - finalize(); - } - }; - - const finalize = () => { + // Define finalize (already declared above) + finalize = () => { if (resolved) return; // Round to integer to preserve tokens. @@ -381,8 +370,6 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { // Check if this was aborted (stream cancelled) const wasAborted = abortSignal?.aborted ?? false; - // Check if this was a timeout (process killed and no natural exit code) - const timedOut = !wasAborted && wall_duration_ms >= effectiveTimeout * 1000 - 10; // 10ms tolerance if (wasAborted) { resolveOnce({ @@ -391,13 +378,6 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { exitCode: -2, wall_duration_ms, }); - } else if (timedOut) { - resolveOnce({ - success: false, - error: `Command timed out after ${effectiveTimeout} seconds`, - exitCode: -1, - wall_duration_ms, - }); } else if (truncated) { // Handle overflow based on policy const overflowPolicy = config.overflow_policy ?? 
"tmpfile"; @@ -437,14 +417,21 @@ export const createBashTool: ToolFactory = (config: ToolConfiguration) => { // tmpfile policy: Save overflow output to temp file instead of returning an error // We don't show ANY of the actual output to avoid overwhelming context. // Instead, save it to a temp file and encourage the agent to use filtering tools. - try { - // Use 8 hex characters for short, memorable temp file IDs - const fileId = Math.random().toString(16).substring(2, 10); - const overflowPath = path.join(config.tempDir, `bash-${fileId}.txt`); - const fullOutput = lines.join("\n"); - fs.writeFileSync(overflowPath, fullOutput, "utf-8"); - - const output = `[OUTPUT OVERFLOW - ${overflowReason ?? "unknown reason"}] + (async () => { + try { + // Use 8 hex characters for short, memorable temp file IDs + const fileId = Math.random().toString(16).substring(2, 10); + const overflowPath = path.join(config.tempDir, `bash-${fileId}.txt`); + const fullOutput = lines.join("\n"); + + // Use runtime.writeFile() for SSH support + const writer = config.runtime.writeFile(overflowPath); + const encoder = new TextEncoder(); + const writerInstance = writer.getWriter(); + await writerInstance.write(encoder.encode(fullOutput)); + await writerInstance.close(); + + const output = `[OUTPUT OVERFLOW - ${overflowReason ?? "unknown reason"}] Full output (${lines.length} lines) saved to ${overflowPath} @@ -452,22 +439,39 @@ Use selective filtering tools (e.g. grep) to extract relevant information and co File will be automatically cleaned up when stream ends.`; - resolveOnce({ - success: false, - error: output, - exitCode: -1, - wall_duration_ms, - }); - } catch (err) { - // If temp file creation fails, fall back to original error - resolveOnce({ - success: false, - error: `Command output overflow: ${overflowReason ?? "unknown reason"}. 
Failed to save overflow to temp file: ${String(err)}`, - exitCode: -1, - wall_duration_ms, - }); - } + resolveOnce({ + success: false, + error: output, + exitCode: -1, + wall_duration_ms, + }); + } catch (err) { + // If temp file creation fails, fall back to original error + resolveOnce({ + success: false, + error: `Command output overflow: ${overflowReason ?? "unknown reason"}. Failed to save overflow to temp file: ${String(err)}`, + exitCode: -1, + wall_duration_ms, + }); + } + })(); } + } else if (exitCode === EXIT_CODE_TIMEOUT) { + // Timeout - special exit code from runtime + resolveOnce({ + success: false, + error: `Command exceeded timeout of ${effectiveTimeout} seconds`, + exitCode: -1, + wall_duration_ms, + }); + } else if (exitCode === EXIT_CODE_ABORTED) { + // Aborted - special exit code from runtime + resolveOnce({ + success: false, + error: "Command execution was aborted", + exitCode: -1, + wall_duration_ms, + }); } else if (exitCode === 0 || exitCode === null) { resolveOnce({ success: true, @@ -485,20 +489,6 @@ File will be automatically cleaned up when stream ends.`; }); } }; - - // Listen to exit event (fires when bash exits, before streams close) - childProcess.child.on("exit", handleExit); - - childProcess.child.on("error", (err: Error) => { - if (resolved) return; - const wall_duration_ms = performance.now() - startTime; - resolveOnce({ - success: false, - error: `Failed to execute command: ${err.message}`, - exitCode: -1, - wall_duration_ms, - }); - }); }); }, }); diff --git a/src/services/tools/fileCommon.test.ts b/src/services/tools/fileCommon.test.ts index 983e48ed9a..835f9a514b 100644 --- a/src/services/tools/fileCommon.test.ts +++ b/src/services/tools/fileCommon.test.ts @@ -1,29 +1,36 @@ import { describe, it, expect } from "bun:test"; -import type * as fs from "fs"; +import type { FileStat } from "@/runtime/Runtime"; import { validatePathInCwd, validateFileSize, MAX_FILE_SIZE } from "./fileCommon"; +import { createRuntime } from 
"@/runtime/runtimeFactory"; describe("fileCommon", () => { describe("validateFileSize", () => { it("should return null for files within size limit", () => { - const stats = { + const stats: FileStat = { size: 1024, // 1KB - } satisfies Partial as fs.Stats; + modifiedTime: new Date(), + isDirectory: false, + }; expect(validateFileSize(stats)).toBeNull(); }); it("should return null for files at exactly the limit", () => { - const stats = { + const stats: FileStat = { size: MAX_FILE_SIZE, - } satisfies Partial as fs.Stats; + modifiedTime: new Date(), + isDirectory: false, + }; expect(validateFileSize(stats)).toBeNull(); }); it("should return error for files exceeding size limit", () => { - const stats = { + const stats: FileStat = { size: MAX_FILE_SIZE + 1, - } satisfies Partial as fs.Stats; + modifiedTime: new Date(), + isDirectory: false, + }; const result = validateFileSize(stats); expect(result).not.toBeNull(); @@ -32,9 +39,11 @@ describe("fileCommon", () => { }); it("should include size information in error message", () => { - const stats = { + const stats: FileStat = { size: MAX_FILE_SIZE * 2, // 2MB - } satisfies Partial as fs.Stats; + modifiedTime: new Date(), + isDirectory: false, + }; const result = validateFileSize(stats); expect(result?.error).toContain("2.00MB"); @@ -42,9 +51,11 @@ describe("fileCommon", () => { }); it("should suggest alternative tools in error message", () => { - const stats = { + const stats: FileStat = { size: MAX_FILE_SIZE + 1, - } satisfies Partial as fs.Stats; + modifiedTime: new Date(), + isDirectory: false, + }; const result = validateFileSize(stats); expect(result?.error).toContain("grep"); @@ -54,68 +65,69 @@ describe("fileCommon", () => { describe("validatePathInCwd", () => { const cwd = "/workspace/project"; + const runtime = createRuntime({ type: "local", srcBaseDir: cwd }); it("should allow relative paths within cwd", () => { - expect(validatePathInCwd("src/file.ts", cwd)).toBeNull(); - 
expect(validatePathInCwd("./src/file.ts", cwd)).toBeNull(); - expect(validatePathInCwd("file.ts", cwd)).toBeNull(); + expect(validatePathInCwd("src/file.ts", cwd, runtime)).toBeNull(); + expect(validatePathInCwd("./src/file.ts", cwd, runtime)).toBeNull(); + expect(validatePathInCwd("file.ts", cwd, runtime)).toBeNull(); }); it("should allow absolute paths within cwd", () => { - expect(validatePathInCwd("/workspace/project/src/file.ts", cwd)).toBeNull(); - expect(validatePathInCwd("/workspace/project/file.ts", cwd)).toBeNull(); + expect(validatePathInCwd("/workspace/project/src/file.ts", cwd, runtime)).toBeNull(); + expect(validatePathInCwd("/workspace/project/file.ts", cwd, runtime)).toBeNull(); }); it("should reject paths that go up and outside cwd with ..", () => { - const result = validatePathInCwd("../outside.ts", cwd); + const result = validatePathInCwd("../outside.ts", cwd, runtime); expect(result).not.toBeNull(); expect(result?.error).toContain("restricted to the workspace directory"); expect(result?.error).toContain("/workspace/project"); }); it("should reject paths that go multiple levels up", () => { - const result = validatePathInCwd("../../outside.ts", cwd); + const result = validatePathInCwd("../../outside.ts", cwd, runtime); expect(result).not.toBeNull(); expect(result?.error).toContain("restricted to the workspace directory"); }); it("should reject paths that go down then up outside cwd", () => { - const result = validatePathInCwd("src/../../outside.ts", cwd); + const result = validatePathInCwd("src/../../outside.ts", cwd, runtime); expect(result).not.toBeNull(); expect(result?.error).toContain("restricted to the workspace directory"); }); it("should reject absolute paths outside cwd", () => { - const result = validatePathInCwd("/etc/passwd", cwd); + const result = validatePathInCwd("/etc/passwd", cwd, runtime); expect(result).not.toBeNull(); expect(result?.error).toContain("restricted to the workspace directory"); }); it("should reject absolute paths 
in different directory tree", () => { - const result = validatePathInCwd("/home/user/file.ts", cwd); + const result = validatePathInCwd("/home/user/file.ts", cwd, runtime); expect(result).not.toBeNull(); expect(result?.error).toContain("restricted to the workspace directory"); }); it("should handle paths with trailing slashes", () => { - expect(validatePathInCwd("src/", cwd)).toBeNull(); + expect(validatePathInCwd("src/", cwd, runtime)).toBeNull(); }); it("should handle nested paths correctly", () => { - expect(validatePathInCwd("src/components/Button/index.ts", cwd)).toBeNull(); - expect(validatePathInCwd("./src/components/Button/index.ts", cwd)).toBeNull(); + expect(validatePathInCwd("src/components/Button/index.ts", cwd, runtime)).toBeNull(); + expect(validatePathInCwd("./src/components/Button/index.ts", cwd, runtime)).toBeNull(); }); it("should provide helpful error message mentioning to ask user", () => { - const result = validatePathInCwd("../outside.ts", cwd); + const result = validatePathInCwd("../outside.ts", cwd, runtime); expect(result?.error).toContain("ask the user for permission"); }); it("should work with cwd that has trailing slash", () => { const cwdWithSlash = "/workspace/project/"; - expect(validatePathInCwd("src/file.ts", cwdWithSlash)).toBeNull(); + expect(validatePathInCwd("src/file.ts", cwdWithSlash, runtime)).toBeNull(); - const result = validatePathInCwd("../outside.ts", cwdWithSlash); + const result = validatePathInCwd("../outside.ts", cwdWithSlash, runtime); expect(result).not.toBeNull(); }); }); diff --git a/src/services/tools/fileCommon.ts b/src/services/tools/fileCommon.ts index c6726ddd3f..e5f6163379 100644 --- a/src/services/tools/fileCommon.ts +++ b/src/services/tools/fileCommon.ts @@ -1,6 +1,7 @@ -import type * as fs from "fs"; import * as path from "path"; import { createPatch } from "diff"; +import type { FileStat, Runtime } from "@/runtime/Runtime"; +import { SSHRuntime } from "@/runtime/SSHRuntime"; // WRITE_DENIED_PREFIX moved 
to @/types/tools for frontend/backend sharing @@ -36,7 +37,7 @@ export function generateDiff(filePath: string, oldContent: string, newContent: s * @param stats - File stats from fs.stat() * @returns Error object if file is too large, null if valid */ -export function validateFileSize(stats: fs.Stats): { error: string } | null { +export function validateFileSize(stats: FileStat): { error: string } | null { if (stats.size > MAX_FILE_SIZE) { const sizeMB = (stats.size / (1024 * 1024)).toFixed(2); const maxMB = (MAX_FILE_SIZE / (1024 * 1024)).toFixed(2); @@ -53,9 +54,22 @@ export function validateFileSize(stats: fs.Stats): { error: string } | null { * * @param filePath - The file path to validate (can be relative or absolute) * @param cwd - The working directory that file operations are restricted to + * @param runtime - The runtime (used to detect SSH - TODO: make path validation runtime-aware) * @returns Error object if invalid, null if valid */ -export function validatePathInCwd(filePath: string, cwd: string): { error: string } | null { +export function validatePathInCwd( + filePath: string, + cwd: string, + runtime: Runtime +): { error: string } | null { + // TODO: Make path validation runtime-aware instead of skipping for SSH. + // For now, skip local path validation for SSH runtimes since: + // 1. Node's path module doesn't understand remote paths (~/cmux/branch) + // 2. The runtime's own file operations will fail on invalid paths anyway + if (runtime instanceof SSHRuntime) { + return null; + } + // Resolve the path (handles relative paths and normalizes) const resolvedPath = path.isAbsolute(filePath) ? 
path.resolve(filePath) diff --git a/src/services/tools/file_edit_insert.test.ts b/src/services/tools/file_edit_insert.test.ts index ba104349a2..b9353e80b1 100644 --- a/src/services/tools/file_edit_insert.test.ts +++ b/src/services/tools/file_edit_insert.test.ts @@ -6,6 +6,7 @@ import { createFileEditInsertTool } from "./file_edit_insert"; import type { FileEditInsertToolArgs, FileEditInsertToolResult } from "@/types/tools"; import type { ToolCallOptions } from "ai"; import { TestTempDir } from "./testHelpers"; +import { createRuntime } from "@/runtime/runtimeFactory"; // Mock ToolCallOptions for testing const mockToolCallOptions: ToolCallOptions = { @@ -19,6 +20,7 @@ function createTestFileEditInsertTool(options?: { cwd?: string }) { const tempDir = new TestTempDir("test-file-edit-insert"); const tool = createFileEditInsertTool({ cwd: options?.cwd ?? process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -209,7 +211,11 @@ describe("file_edit_insert tool", () => { // Setup const nonExistentPath = path.join(testDir, "newfile.txt"); - const tool = createFileEditInsertTool({ cwd: testDir, tempDir: "/tmp" }); + const tool = createFileEditInsertTool({ + cwd: testDir, + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), + tempDir: "/tmp", + }); const args: FileEditInsertToolArgs = { file_path: nonExistentPath, line_offset: 0, @@ -231,7 +237,11 @@ describe("file_edit_insert tool", () => { // Setup const nestedPath = path.join(testDir, "nested", "dir", "newfile.txt"); - const tool = createFileEditInsertTool({ cwd: testDir, tempDir: "/tmp" }); + const tool = createFileEditInsertTool({ + cwd: testDir, + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), + tempDir: "/tmp", + }); const args: FileEditInsertToolArgs = { file_path: nestedPath, line_offset: 0, @@ -254,7 +264,11 @@ describe("file_edit_insert tool", () => { const initialContent = "line1\nline2"; await fs.writeFile(testFilePath, 
initialContent); - const tool = createFileEditInsertTool({ cwd: testDir, tempDir: "/tmp" }); + const tool = createFileEditInsertTool({ + cwd: testDir, + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), + tempDir: "/tmp", + }); const args: FileEditInsertToolArgs = { file_path: testFilePath, line_offset: 1, diff --git a/src/services/tools/file_edit_insert.ts b/src/services/tools/file_edit_insert.ts index 9637619ac4..12a5599c61 100644 --- a/src/services/tools/file_edit_insert.ts +++ b/src/services/tools/file_edit_insert.ts @@ -1,5 +1,4 @@ import { tool } from "ai"; -import * as fs from "fs/promises"; import * as path from "path"; import type { FileEditInsertToolResult } from "@/types/tools"; import type { ToolConfiguration, ToolFactory } from "@/utils/tools/tools"; @@ -7,6 +6,9 @@ import { TOOL_DEFINITIONS } from "@/utils/tools/toolDefinitions"; import { validatePathInCwd } from "./fileCommon"; import { WRITE_DENIED_PREFIX } from "@/types/tools"; import { executeFileEditOperation } from "./file_edit_operation"; +import { RuntimeError } from "@/runtime/Runtime"; +import { fileExists } from "@/utils/runtime/fileExists"; +import { writeFileString } from "@/utils/runtime/helpers"; /** * File edit insert tool factory for AI assistant @@ -24,7 +26,7 @@ export const createFileEditInsertTool: ToolFactory = (config: ToolConfiguration) create, }): Promise => { try { - const pathValidation = validatePathInCwd(file_path, config.cwd); + const pathValidation = validatePathInCwd(file_path, config.cwd, config.runtime); if (pathValidation) { return { success: false, @@ -43,12 +45,10 @@ export const createFileEditInsertTool: ToolFactory = (config: ToolConfiguration) ? 
file_path : path.resolve(config.cwd, file_path); - let fileExists = await fs - .stat(resolvedPath) - .then((stats) => stats.isFile()) - .catch(() => false); + // Check if file exists using runtime + const exists = await fileExists(config.runtime, resolvedPath); - if (!fileExists) { + if (!exists) { if (!create) { return { success: false, @@ -56,10 +56,18 @@ export const createFileEditInsertTool: ToolFactory = (config: ToolConfiguration) }; } - const parentDir = path.dirname(resolvedPath); - await fs.mkdir(parentDir, { recursive: true }); - await fs.writeFile(resolvedPath, ""); - fileExists = true; + // Create empty file using runtime helper + try { + await writeFileString(config.runtime, resolvedPath, ""); + } catch (err) { + if (err instanceof RuntimeError) { + return { + success: false, + error: `${WRITE_DENIED_PREFIX} ${err.message}`, + }; + } + throw err; + } } return executeFileEditOperation({ diff --git a/src/services/tools/file_edit_operation.test.ts b/src/services/tools/file_edit_operation.test.ts index 67bb5ab74c..927aae83be 100644 --- a/src/services/tools/file_edit_operation.test.ts +++ b/src/services/tools/file_edit_operation.test.ts @@ -1,15 +1,20 @@ -import { describe, it, expect } from "bun:test"; +import { describe, test, expect } from "@jest/globals"; import { executeFileEditOperation } from "./file_edit_operation"; import { WRITE_DENIED_PREFIX } from "@/types/tools"; +import { createRuntime } from "@/runtime/runtimeFactory"; const TEST_CWD = "/tmp"; function createConfig() { - return { cwd: TEST_CWD, tempDir: "/tmp" }; + return { + cwd: TEST_CWD, + runtime: createRuntime({ type: "local", srcBaseDir: TEST_CWD }), + tempDir: "/tmp", + }; } describe("executeFileEditOperation", () => { - it("should return error when path validation fails", async () => { + test("should return error when path validation fails", async () => { const result = await executeFileEditOperation({ config: createConfig(), filePath: "../../etc/passwd", diff --git 
a/src/services/tools/file_edit_operation.ts b/src/services/tools/file_edit_operation.ts index 97a8e98721..14b922357f 100644 --- a/src/services/tools/file_edit_operation.ts +++ b/src/services/tools/file_edit_operation.ts @@ -1,10 +1,10 @@ -import * as fs from "fs/promises"; import * as path from "path"; -import writeFileAtomic from "write-file-atomic"; import type { FileEditDiffSuccessBase, FileEditErrorResult } from "@/types/tools"; import { WRITE_DENIED_PREFIX } from "@/types/tools"; import type { ToolConfiguration } from "@/utils/tools/tools"; import { generateDiff, validateFileSize, validatePathInCwd } from "./fileCommon"; +import { RuntimeError } from "@/runtime/Runtime"; +import { readFileString, writeFileString } from "@/utils/runtime/helpers"; type FileEditOperationResult = | { @@ -37,7 +37,7 @@ export async function executeFileEditOperation({ FileEditErrorResult | (FileEditDiffSuccessBase & TMetadata) > { try { - const pathValidation = validatePathInCwd(filePath, config.cwd); + const pathValidation = validatePathInCwd(filePath, config.cwd, config.runtime); if (pathValidation) { return { success: false, @@ -47,15 +47,28 @@ export async function executeFileEditOperation({ const resolvedPath = path.isAbsolute(filePath) ? 
filePath : path.resolve(config.cwd, filePath); - const stats = await fs.stat(resolvedPath); - if (!stats.isFile()) { + // Check if file exists and get stats using runtime + let fileStat; + try { + fileStat = await config.runtime.stat(resolvedPath); + } catch (err) { + if (err instanceof RuntimeError) { + return { + success: false, + error: `${WRITE_DENIED_PREFIX} ${err.message}`, + }; + } + throw err; + } + + if (fileStat.isDirectory) { return { success: false, - error: `${WRITE_DENIED_PREFIX} Path exists but is not a file: ${resolvedPath}`, + error: `${WRITE_DENIED_PREFIX} Path is a directory, not a file: ${resolvedPath}`, }; } - const sizeValidation = validateFileSize(stats); + const sizeValidation = validateFileSize(fileStat); if (sizeValidation) { return { success: false, @@ -63,7 +76,19 @@ export async function executeFileEditOperation({ }; } - const originalContent = await fs.readFile(resolvedPath, { encoding: "utf-8" }); + // Read file content using runtime helper + let originalContent: string; + try { + originalContent = await readFileString(config.runtime, resolvedPath); + } catch (err) { + if (err instanceof RuntimeError) { + return { + success: false, + error: `${WRITE_DENIED_PREFIX} ${err.message}`, + }; + } + throw err; + } const operationResult = await Promise.resolve(operation(originalContent)); if (!operationResult.success) { @@ -73,7 +98,18 @@ export async function executeFileEditOperation({ }; } - await writeFileAtomic(resolvedPath, operationResult.newContent, { encoding: "utf-8" }); + // Write file using runtime helper + try { + await writeFileString(config.runtime, resolvedPath, operationResult.newContent); + } catch (err) { + if (err instanceof RuntimeError) { + return { + success: false, + error: `${WRITE_DENIED_PREFIX} ${err.message}`, + }; + } + throw err; + } const diff = generateDiff(resolvedPath, originalContent, operationResult.newContent); diff --git a/src/services/tools/file_edit_replace.test.ts 
b/src/services/tools/file_edit_replace.test.ts index 6494cac811..0082028b4f 100644 --- a/src/services/tools/file_edit_replace.test.ts +++ b/src/services/tools/file_edit_replace.test.ts @@ -11,6 +11,7 @@ import type { FileEditReplaceLinesToolResult, } from "@/types/tools"; import type { ToolCallOptions } from "ai"; +import { createRuntime } from "@/runtime/runtimeFactory"; // Mock ToolCallOptions for testing const mockToolCallOptions: ToolCallOptions = { @@ -56,7 +57,11 @@ describe("file_edit_replace_string tool", () => { it("should apply a single edit successfully", async () => { await setupFile(testFilePath, "Hello world\nThis is a test\nGoodbye world"); - const tool = createFileEditReplaceStringTool({ cwd: testDir, tempDir: "/tmp" }); + const tool = createFileEditReplaceStringTool({ + cwd: testDir, + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), + tempDir: "/tmp", + }); const payload: FileEditReplaceStringToolArgs = { file_path: testFilePath, @@ -90,7 +95,11 @@ describe("file_edit_replace_lines tool", () => { it("should replace a line range successfully", async () => { await setupFile(testFilePath, "line1\nline2\nline3\nline4"); - const tool = createFileEditReplaceLinesTool({ cwd: testDir, tempDir: "/tmp" }); + const tool = createFileEditReplaceLinesTool({ + cwd: testDir, + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), + tempDir: "/tmp", + }); const payload: FileEditReplaceLinesToolArgs = { file_path: testFilePath, diff --git a/src/services/tools/file_read.test.ts b/src/services/tools/file_read.test.ts index 61129c85af..69a28fe69d 100644 --- a/src/services/tools/file_read.test.ts +++ b/src/services/tools/file_read.test.ts @@ -6,6 +6,7 @@ import { createFileReadTool } from "./file_read"; import type { FileReadToolArgs, FileReadToolResult } from "@/types/tools"; import type { ToolCallOptions } from "ai"; import { TestTempDir } from "./testHelpers"; +import { createRuntime } from "@/runtime/runtimeFactory"; // Mock ToolCallOptions 
for testing const mockToolCallOptions: ToolCallOptions = { @@ -19,6 +20,7 @@ function createTestFileReadTool(options?: { cwd?: string }) { const tempDir = new TestTempDir("test-file-read"); const tool = createFileReadTool({ cwd: options?.cwd ?? process.cwd(), + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), tempDir: tempDir.path, }); @@ -197,7 +199,7 @@ describe("file_read tool", () => { // Assert expect(result.success).toBe(false); if (!result.success) { - expect(result.error).toContain("File not found"); + expect(result.error).toMatch(/File not found|Failed to stat.*ENOENT/); } }); @@ -330,7 +332,11 @@ describe("file_read tool", () => { await fs.mkdir(subDir); // Try to read file outside cwd by going up - const tool = createFileReadTool({ cwd: subDir, tempDir: "/tmp" }); + const tool = createFileReadTool({ + cwd: subDir, + runtime: createRuntime({ type: "local", srcBaseDir: "/tmp" }), + tempDir: "/tmp", + }); const args: FileReadToolArgs = { filePath: "../test.txt", // This goes outside subDir back to testDir }; diff --git a/src/services/tools/file_read.ts b/src/services/tools/file_read.ts index 3c1227da63..9bf9c6d1c7 100644 --- a/src/services/tools/file_read.ts +++ b/src/services/tools/file_read.ts @@ -1,10 +1,11 @@ import { tool } from "ai"; -import * as fs from "fs/promises"; import * as path from "path"; import type { FileReadToolResult } from "@/types/tools"; import type { ToolConfiguration, ToolFactory } from "@/utils/tools/tools"; import { TOOL_DEFINITIONS } from "@/utils/tools/toolDefinitions"; import { validatePathInCwd, validateFileSize } from "./fileCommon"; +import { RuntimeError } from "@/runtime/Runtime"; +import { readFileString } from "@/utils/runtime/helpers"; /** * File read tool factory for AI assistant @@ -22,7 +23,7 @@ export const createFileReadTool: ToolFactory = (config: ToolConfiguration) => { // Note: abortSignal available but not used - file reads are fast and complete quickly try { // Validate that the path is within 
the working directory - const pathValidation = validatePathInCwd(filePath, config.cwd); + const pathValidation = validatePathInCwd(filePath, config.cwd, config.runtime); if (pathValidation) { return { success: false, @@ -35,17 +36,29 @@ export const createFileReadTool: ToolFactory = (config: ToolConfiguration) => { ? filePath : path.resolve(config.cwd, filePath); - // Check if file exists - const stats = await fs.stat(resolvedPath); - if (!stats.isFile()) { + // Check if file exists using runtime + let fileStat; + try { + fileStat = await config.runtime.stat(resolvedPath); + } catch (err) { + if (err instanceof RuntimeError) { + return { + success: false, + error: err.message, + }; + } + throw err; + } + + if (fileStat.isDirectory) { return { success: false, - error: `Path exists but is not a file: ${resolvedPath}`, + error: `Path is a directory, not a file: ${resolvedPath}`, }; } // Validate file size - const sizeValidation = validateFileSize(stats); + const sizeValidation = validateFileSize(fileStat); if (sizeValidation) { return { success: false, @@ -53,8 +66,19 @@ export const createFileReadTool: ToolFactory = (config: ToolConfiguration) => { }; } - // Read full file content - const fullContent = await fs.readFile(resolvedPath, { encoding: "utf-8" }); + // Read full file content using runtime helper + let fullContent: string; + try { + fullContent = await readFileString(config.runtime, resolvedPath); + } catch (err) { + if (err instanceof RuntimeError) { + return { + success: false, + error: err.message, + }; + } + throw err; + } const startLineNumber = offset ?? 
1; @@ -133,8 +157,8 @@ export const createFileReadTool: ToolFactory = (config: ToolConfiguration) => { // Return file info and content return { success: true, - file_size: stats.size, - modifiedTime: stats.mtime.toISOString(), + file_size: fileStat.size, + modifiedTime: fileStat.modifiedTime.toISOString(), lines_read: numberedLines.length, content, }; diff --git a/src/types/ipc.ts b/src/types/ipc.ts index 498ceb9405..7ae90ee34c 100644 --- a/src/types/ipc.ts +++ b/src/types/ipc.ts @@ -8,6 +8,7 @@ import type { ToolPolicy } from "@/utils/tools/toolPolicy"; import type { BashToolResult } from "./tools"; import type { Secret } from "./secrets"; import type { CmuxProviderOptions } from "./providerOptions"; +import type { RuntimeConfig } from "./runtime"; import type { StreamStartEvent, StreamDeltaEvent, @@ -225,7 +226,8 @@ export interface IPCApi { create( projectPath: string, branchName: string, - trunkBranch: string + trunkBranch: string, + runtimeConfig?: RuntimeConfig ): Promise< { success: true; metadata: FrontendWorkspaceMetadata } | { success: false; error: string } >; diff --git a/src/types/project.ts b/src/types/project.ts index 682aa4ace9..3de56e7d1c 100644 --- a/src/types/project.ts +++ b/src/types/project.ts @@ -3,6 +3,8 @@ * Kept lightweight for preload script usage. */ +import type { RuntimeConfig } from "./runtime"; + /** * Workspace configuration in config.json. * @@ -11,7 +13,8 @@ * "path": "~/.cmux/src/project/workspace-id", // Kept for backward compat * "id": "a1b2c3d4e5", // Stable workspace ID * "name": "feature-branch", // User-facing name - * "createdAt": "2024-01-01T00:00:00Z" // Creation timestamp + * "createdAt": "2024-01-01T00:00:00Z", // Creation timestamp + * "runtimeConfig": { ... 
} // Runtime config (local vs SSH) * } * * LEGACY FORMAT (old workspaces, still supported): @@ -33,6 +36,9 @@ export interface Workspace { /** ISO 8601 creation timestamp - optional for legacy */ createdAt?: string; + + /** Runtime configuration (local vs SSH) - optional, defaults to local */ + runtimeConfig?: RuntimeConfig; } export interface ProjectConfig { diff --git a/src/types/runtime.ts b/src/types/runtime.ts new file mode 100644 index 0000000000..e72d6ff77d --- /dev/null +++ b/src/types/runtime.ts @@ -0,0 +1,21 @@ +/** + * Runtime configuration types for workspace execution environments + */ + +export type RuntimeConfig = + | { + type: "local"; + /** Base directory where all workspaces are stored (e.g., ~/.cmux/src) */ + srcBaseDir: string; + } + | { + type: "ssh"; + /** SSH host (can be hostname, user@host, or SSH config alias) */ + host: string; + /** Base directory on remote host where all workspaces are stored */ + srcBaseDir: string; + /** Optional: Path to SSH private key (if not using ~/.ssh/config or ssh-agent) */ + identityFile?: string; + /** Optional: SSH port (default: 22) */ + port?: number; + }; diff --git a/src/types/workspace.ts b/src/types/workspace.ts index 3784f0f69d..718ddf19e6 100644 --- a/src/types/workspace.ts +++ b/src/types/workspace.ts @@ -34,6 +34,8 @@ export const WorkspaceMetadataSchema = z.object({ * - Directory name uses workspace.name (the branch name) * - This avoids storing redundant derived data */ +import type { RuntimeConfig } from "./runtime"; + export interface WorkspaceMetadata { /** Stable unique identifier (10 hex chars for new workspaces, legacy format for old) */ id: string; @@ -49,6 +51,9 @@ export interface WorkspaceMetadata { /** ISO 8601 timestamp of when workspace was created (optional for backward compatibility) */ createdAt?: string; + + /** Runtime configuration for this workspace (optional, defaults to local) */ + runtimeConfig?: RuntimeConfig; } /** diff --git a/src/utils/chatCommands.test.ts 
b/src/utils/chatCommands.test.ts new file mode 100644 index 0000000000..304031dd15 --- /dev/null +++ b/src/utils/chatCommands.test.ts @@ -0,0 +1,74 @@ +import { parseRuntimeString } from "./chatCommands"; + +describe("parseRuntimeString", () => { + const workspaceName = "test-workspace"; + + test("returns undefined for undefined runtime (default to local)", () => { + expect(parseRuntimeString(undefined, workspaceName)).toBeUndefined(); + }); + + test("returns undefined for explicit 'local' runtime", () => { + expect(parseRuntimeString("local", workspaceName)).toBeUndefined(); + expect(parseRuntimeString("LOCAL", workspaceName)).toBeUndefined(); + expect(parseRuntimeString(" local ", workspaceName)).toBeUndefined(); + }); + + test("parses valid SSH runtime", () => { + const result = parseRuntimeString("ssh user@host", workspaceName); + expect(result).toEqual({ + type: "ssh", + host: "user@host", + srcBaseDir: "~/cmux", + }); + }); + + test("preserves case in SSH host", () => { + const result = parseRuntimeString("ssh User@Host.Example.Com", workspaceName); + expect(result).toEqual({ + type: "ssh", + host: "User@Host.Example.Com", + srcBaseDir: "~/cmux", + }); + }); + + test("handles extra whitespace", () => { + const result = parseRuntimeString(" ssh user@host ", workspaceName); + expect(result).toEqual({ + type: "ssh", + host: "user@host", + srcBaseDir: "~/cmux", + }); + }); + + test("throws error for SSH without host", () => { + expect(() => parseRuntimeString("ssh", workspaceName)).toThrow("SSH runtime requires host"); + expect(() => parseRuntimeString("ssh ", workspaceName)).toThrow("SSH runtime requires host"); + }); + + test("accepts SSH with hostname only (user will be inferred)", () => { + const result = parseRuntimeString("ssh hostname", workspaceName); + expect(result).toEqual({ + type: "ssh", + host: "hostname", + srcBaseDir: "~/cmux", + }); + }); + + test("accepts SSH with hostname.domain only", () => { + const result = parseRuntimeString("ssh 
dev.example.com", workspaceName); + expect(result).toEqual({ + type: "ssh", + host: "dev.example.com", + srcBaseDir: "~/cmux", + }); + }); + + test("throws error for unknown runtime type", () => { + expect(() => parseRuntimeString("docker", workspaceName)).toThrow( + "Unknown runtime type: 'docker'" + ); + expect(() => parseRuntimeString("remote", workspaceName)).toThrow( + "Unknown runtime type: 'remote'" + ); + }); +}); diff --git a/src/utils/chatCommands.ts b/src/utils/chatCommands.ts index 0acdc412a5..cfaba52de6 100644 --- a/src/utils/chatCommands.ts +++ b/src/utils/chatCommands.ts @@ -9,6 +9,7 @@ import type { SendMessageOptions } from "@/types/ipc"; import type { CmuxFrontendMetadata, CompactionRequestData } from "@/types/message"; import type { FrontendWorkspaceMetadata } from "@/types/workspace"; +import type { RuntimeConfig } from "@/types/runtime"; import { CUSTOM_EVENTS } from "@/constants/events"; import type { Toast } from "@/components/ChatInputToast"; import type { ParsedCommand } from "@/utils/slashCommands/types"; @@ -19,10 +20,52 @@ import { resolveCompactionModel } from "@/utils/messages/compactionModelPreferen // Workspace Creation // ============================================================================ +/** + * Parse runtime string from -r flag into RuntimeConfig + * Supports formats: + * - "ssh " or "ssh " -> SSH runtime + * - "local" -> Local runtime (explicit) + * - undefined -> Local runtime (default) + */ +export function parseRuntimeString( + runtime: string | undefined, + _workspaceName: string +): RuntimeConfig | undefined { + if (!runtime) { + return undefined; // Default to local (backend decides) + } + + const trimmed = runtime.trim(); + const lowerTrimmed = trimmed.toLowerCase(); + + if (lowerTrimmed === "local") { + return undefined; // Explicit local - let backend use default + } + + // Parse "ssh " or "ssh " format + if (lowerTrimmed === "ssh" || lowerTrimmed.startsWith("ssh ")) { + const hostPart = 
trimmed.slice(3).trim(); // Preserve original case for host, skip "ssh" + if (!hostPart) { + throw new Error("SSH runtime requires host (e.g., 'ssh hostname' or 'ssh user@host')"); + } + + // Accept both "hostname" and "user@hostname" formats + // SSH will use current user or ~/.ssh/config if user not specified + return { + type: "ssh", + host: hostPart, + srcBaseDir: "~/cmux", // Default remote base directory (NOT including workspace name) + }; + } + + throw new Error(`Unknown runtime type: '${runtime}'. Use 'ssh ' or 'local'`); +} + export interface CreateWorkspaceOptions { projectPath: string; workspaceName: string; trunkBranch?: string; + runtime?: string; startMessage?: string; sendMessageOptions?: SendMessageOptions; } @@ -49,10 +92,14 @@ export async function createNewWorkspace( effectiveTrunk = recommendedTrunk ?? "main"; } + // Parse runtime config if provided + const runtimeConfig = parseRuntimeString(options.runtime, options.workspaceName); + const result = await window.api.workspace.create( options.projectPath, options.workspaceName, - effectiveTrunk + effectiveTrunk, + runtimeConfig ); if (!result.success) { @@ -88,12 +135,16 @@ export async function createNewWorkspace( export function formatNewCommand( workspaceName: string, trunkBranch?: string, + runtime?: string, startMessage?: string ): string { let cmd = `/new ${workspaceName}`; if (trunkBranch) { cmd += ` -t ${trunkBranch}`; } + if (runtime) { + cmd += ` -r '${runtime}'`; + } if (startMessage) { cmd += `\n${startMessage}`; } @@ -262,6 +313,7 @@ export async function handleNewCommand( projectPath: workspaceInfo.projectPath, workspaceName: parsed.workspaceName, trunkBranch: parsed.trunkBranch, + runtime: parsed.runtime, startMessage: parsed.startMessage, sendMessageOptions, }); diff --git a/src/utils/commands/sources.test.ts b/src/utils/commands/sources.test.ts index b2e98e13c4..02f32fbcea 100644 --- a/src/utils/commands/sources.test.ts +++ b/src/utils/commands/sources.test.ts @@ -34,9 +34,6 @@ 
const mk = (over: Partial[0]> = {}) => { streamingModels: new Map(), getThinkingLevel: () => "off", onSetThinkingLevel: () => undefined, - onCreateWorkspace: async (_projectPath, _branchName, _trunkBranch) => { - await Promise.resolve(); - }, onOpenNewWorkspaceModal: () => undefined, onSelectWorkspace: () => undefined, onRemoveWorkspace: () => Promise.resolve({ success: true }), diff --git a/src/utils/commands/sources.ts b/src/utils/commands/sources.ts index 9c5fa97936..6a24e2644c 100644 --- a/src/utils/commands/sources.ts +++ b/src/utils/commands/sources.ts @@ -23,11 +23,6 @@ export interface BuildSourcesParams { onSetThinkingLevel: (workspaceId: string, level: ThinkingLevel) => void; onOpenNewWorkspaceModal: (projectPath: string) => void; - onCreateWorkspace: ( - projectPath: string, - branchName: string, - trunkBranch: string - ) => Promise; getBranchesForProject: (projectPath: string) => Promise; onSelectWorkspace: (sel: { projectPath: string; @@ -437,15 +432,6 @@ export function buildCoreSources(p: BuildSourcesParams): Array<() => CommandActi // Projects actions.push(() => { - const branchCache = new Map(); - const getBranchInfoForProject = async (projectPath: string) => { - const cached = branchCache.get(projectPath); - if (cached) return cached; - const info = await p.getBranchesForProject(projectPath); - branchCache.set(projectPath, info); - return info; - }; - const list: CommandAction[] = [ { id: "project:add", @@ -473,46 +459,11 @@ export function buildCoreSources(p: BuildSourcesParams): Array<() => CommandActi keywords: [projectPath], })), }, - { - type: "text", - name: "branchName", - label: "Workspace branch name", - placeholder: "Enter branch name", - validate: (v) => (!v.trim() ? 
"Branch name is required" : null), - }, - { - type: "select", - name: "trunkBranch", - label: "Trunk branch", - placeholder: "Search branches…", - getOptions: async (values) => { - if (!values.projectPath) return []; - const info = await getBranchInfoForProject(values.projectPath); - return info.branches.map((branch) => ({ - id: branch, - label: branch, - keywords: [branch], - })); - }, - }, ], - onSubmit: async (vals) => { + onSubmit: (vals) => { const projectPath = vals.projectPath; - const trimmedBranchName = vals.branchName.trim(); - const info = await getBranchInfoForProject(projectPath); - const providedTrunk = vals.trunkBranch?.trim(); - const resolvedTrunk = - providedTrunk && info.branches.includes(providedTrunk) - ? providedTrunk - : info.branches.includes(info.recommendedTrunk) - ? info.recommendedTrunk - : info.branches[0]; - - if (!resolvedTrunk) { - throw new Error("Unable to determine trunk branch for workspace creation"); - } - - await p.onCreateWorkspace(projectPath, trimmedBranchName, resolvedTrunk); + // Open the New Workspace Modal for the selected project + p.onOpenNewWorkspaceModal(projectPath); }, }, }, diff --git a/src/utils/errors.ts b/src/utils/errors.ts new file mode 100644 index 0000000000..90e0e3ce79 --- /dev/null +++ b/src/utils/errors.ts @@ -0,0 +1,7 @@ +/** + * Extract a string message from an unknown error value + * Handles Error objects and other thrown values consistently + */ +export function getErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); +} diff --git a/src/utils/runtime/fileExists.ts b/src/utils/runtime/fileExists.ts new file mode 100644 index 0000000000..7f370faef6 --- /dev/null +++ b/src/utils/runtime/fileExists.ts @@ -0,0 +1,16 @@ +import type { Runtime } from "@/runtime/Runtime"; + +/** + * Check if a path exists using runtime.stat() + * @param runtime Runtime instance to use + * @param path Path to check + * @returns True if path exists, false otherwise + */ +export async function fileExists(runtime: Runtime, path: string): Promise { + try { + await runtime.stat(path); + return true; + } catch { + return false; + } +} diff --git a/src/utils/runtime/helpers.ts b/src/utils/runtime/helpers.ts new file mode 100644 index 0000000000..b6b5e57316 --- /dev/null +++ b/src/utils/runtime/helpers.ts @@ -0,0 +1,116 @@ +import path from "path"; +import type { Runtime, ExecOptions } from "@/runtime/Runtime"; + +/** + * Convenience helpers for working with streaming Runtime APIs. + * These provide simple string-based APIs on top of the low-level streaming primitives. 
+ */ + +/** + * Extract project name from a project path + * Works for both local paths and remote paths + */ +export function getProjectName(projectPath: string): string { + // For local paths, use path.basename + // For remote paths (containing /), use the last segment + return path.basename(projectPath); +} + +/** + * Result from executing a command with buffered output + */ +export interface ExecResult { + /** Standard output */ + stdout: string; + /** Standard error */ + stderr: string; + /** Exit code (0 = success) */ + exitCode: number; + /** Wall clock duration in milliseconds */ + duration: number; +} + +/** + * Execute a command and buffer all output into strings + */ +export async function execBuffered( + runtime: Runtime, + command: string, + options: ExecOptions & { stdin?: string } +): Promise { + const stream = await runtime.exec(command, options); + + // Write stdin if provided + if (options.stdin !== undefined) { + const writer = stream.stdin.getWriter(); + try { + await writer.write(new TextEncoder().encode(options.stdin)); + await writer.close(); + } catch (err) { + writer.releaseLock(); + throw err; + } + } else { + // Close stdin immediately if no input + await stream.stdin.close(); + } + + // Read stdout and stderr concurrently + const [stdout, stderr, exitCode, duration] = await Promise.all([ + streamToString(stream.stdout), + streamToString(stream.stderr), + stream.exitCode, + stream.duration, + ]); + + return { stdout, stderr, exitCode, duration }; +} + +/** + * Read file contents as a UTF-8 string + */ +export async function readFileString(runtime: Runtime, path: string): Promise { + const stream = runtime.readFile(path); + return streamToString(stream); +} + +/** + * Write string contents to a file atomically + */ +export async function writeFileString( + runtime: Runtime, + path: string, + content: string +): Promise { + const stream = runtime.writeFile(path); + const writer = stream.getWriter(); + try { + await writer.write(new 
TextEncoder().encode(content)); + await writer.close(); + } catch (err) { + writer.releaseLock(); + throw err; + } +} + +/** + * Convert a ReadableStream to a UTF-8 string + */ +async function streamToString(stream: ReadableStream): Promise { + const reader = stream.getReader(); + const decoder = new TextDecoder("utf-8"); + let result = ""; + + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + result += decoder.decode(value, { stream: true }); + } + // Final flush + result += decoder.decode(); + return result; + } finally { + reader.releaseLock(); + } +} diff --git a/src/utils/slashCommands/registry.ts b/src/utils/slashCommands/registry.ts index 7d1d6038d6..9e16651c7c 100644 --- a/src/utils/slashCommands/registry.ts +++ b/src/utils/slashCommands/registry.ts @@ -493,7 +493,7 @@ const forkCommandDefinition: SlashCommandDefinition = { const newCommandDefinition: SlashCommandDefinition = { key: "new", description: - "Create new workspace with optional trunk branch. Use -t to specify trunk. Add start message on lines after the command.", + "Create new workspace with optional trunk branch and runtime. Use -t to specify trunk, -r for remote execution (e.g., 'ssh hostname' or 'ssh user@host'). 
Add start message on lines after the command.", handler: ({ rawInput }): ParsedCommand => { const { tokens: firstLineTokens, @@ -503,7 +503,7 @@ const newCommandDefinition: SlashCommandDefinition = { // Parse flags from first line using minimist const parsed = minimist(firstLineTokens, { - string: ["t"], + string: ["t", "r"], unknown: (arg: string) => { // Unknown flags starting with - are errors if (arg.startsWith("-")) { @@ -514,12 +514,15 @@ const newCommandDefinition: SlashCommandDefinition = { }); // Check for unknown flags - return undefined workspaceName to open modal - const unknownFlags = firstLineTokens.filter((token) => token.startsWith("-") && token !== "-t"); + const unknownFlags = firstLineTokens.filter( + (token) => token.startsWith("-") && token !== "-t" && token !== "-r" + ); if (unknownFlags.length > 0) { return { type: "new", workspaceName: undefined, trunkBranch: undefined, + runtime: undefined, startMessage: undefined, }; } @@ -530,6 +533,7 @@ const newCommandDefinition: SlashCommandDefinition = { type: "new", workspaceName: undefined, trunkBranch: undefined, + runtime: undefined, startMessage: undefined, }; } @@ -543,6 +547,7 @@ const newCommandDefinition: SlashCommandDefinition = { type: "new", workspaceName: undefined, trunkBranch: undefined, + runtime: undefined, startMessage: undefined, }; } @@ -553,10 +558,17 @@ const newCommandDefinition: SlashCommandDefinition = { trunkBranch = parsed.t.trim(); } + // Get runtime from -r flag + let runtime: string | undefined; + if (parsed.r !== undefined && typeof parsed.r === "string" && parsed.r.trim().length > 0) { + runtime = parsed.r.trim(); + } + return { type: "new", workspaceName, trunkBranch, + runtime, startMessage: remainingLines, }; }, diff --git a/src/utils/slashCommands/types.ts b/src/utils/slashCommands/types.ts index 996b318bb3..408fc2a8bb 100644 --- a/src/utils/slashCommands/types.ts +++ b/src/utils/slashCommands/types.ts @@ -23,7 +23,13 @@ export type ParsedCommand = | { type: 
"telemetry-help" } | { type: "fork"; newName: string; startMessage?: string } | { type: "fork-help" } - | { type: "new"; workspaceName?: string; trunkBranch?: string; startMessage?: string } + | { + type: "new"; + workspaceName?: string; + trunkBranch?: string; + runtime?: string; + startMessage?: string; + } | { type: "unknown-command"; command: string; subcommand?: string } | null; diff --git a/src/utils/tools/tools.ts b/src/utils/tools/tools.ts index b924dbd9ff..952abebef1 100644 --- a/src/utils/tools/tools.ts +++ b/src/utils/tools/tools.ts @@ -8,12 +8,16 @@ import { createProposePlanTool } from "@/services/tools/propose_plan"; import { createTodoWriteTool, createTodoReadTool } from "@/services/tools/todo"; import { log } from "@/services/log"; +import type { Runtime } from "@/runtime/Runtime"; + /** * Configuration for tools that need runtime context */ export interface ToolConfiguration { - /** Working directory for command execution (required) */ + /** Working directory for command execution - actual path in runtime's context (local or remote) */ cwd: string; + /** Runtime environment for executing commands and file operations */ + runtime: Runtime; /** Environment secrets to inject (optional) */ secrets?: Record; /** Process niceness level (optional, -20 to 19, lower = higher priority) */ diff --git a/src/utils/validation/workspaceValidation.ts b/src/utils/validation/workspaceValidation.ts index 0e7d983fa7..345d41cd99 100644 --- a/src/utils/validation/workspaceValidation.ts +++ b/src/utils/validation/workspaceValidation.ts @@ -5,7 +5,6 @@ * - Pattern: [a-z0-9_-]{1,64} */ export function validateWorkspaceName(name: string): { valid: boolean; error?: string } { - // eslint-disable-next-line @typescript-eslint/prefer-optional-chain if (!name || name.length === 0) { return { valid: false, error: "Workspace name cannot be empty" }; } diff --git a/tests/ipcMain/anthropic1MContext.test.ts b/tests/ipcMain/anthropic1MContext.test.ts index f3c0d6fcdb..34c60b27bc 
100644 --- a/tests/ipcMain/anthropic1MContext.test.ts +++ b/tests/ipcMain/anthropic1MContext.test.ts @@ -20,6 +20,13 @@ describeIntegration("IpcMain anthropic 1M context integration tests", () => { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 30000); // 30s timeout for tokenizer loading + test.concurrent( "should handle larger context with 1M flag enabled vs standard limits", async () => { diff --git a/tests/ipcMain/createWorkspace.test.ts b/tests/ipcMain/createWorkspace.test.ts index 34337e41a9..e8c3f2550a 100644 --- a/tests/ipcMain/createWorkspace.test.ts +++ b/tests/ipcMain/createWorkspace.test.ts @@ -1,99 +1,725 @@ +/** + * Integration tests for WORKSPACE_CREATE IPC handler + * + * Tests both LocalRuntime and SSHRuntime without mocking to verify: + * - Workspace creation mechanics (git worktree, directory structure) + * - Branch handling (new vs existing branches) + * - Init hook execution with logging + * - Parity between runtime implementations + * + * Uses real IPC handlers, real git operations, and Docker SSH server. 
+ */ + +import * as fs from "fs/promises"; +import * as path from "path"; +import { exec } from "child_process"; +import { promisify } from "util"; import { shouldRunIntegrationTests, createTestEnvironment, cleanupTestEnvironment } from "./setup"; +import type { TestEnvironment } from "./setup"; import { IPC_CHANNELS } from "../../src/constants/ipc-constants"; -import { createTempGitRepo, cleanupTempGitRepo } from "./helpers"; +import { createTempGitRepo, cleanupTempGitRepo, generateBranchName } from "./helpers"; import { detectDefaultTrunkBranch } from "../../src/git"; +import { + isDockerAvailable, + startSSHServer, + stopSSHServer, + type SSHServerConfig, +} from "../runtime/ssh-fixture"; +import type { RuntimeConfig } from "../../src/types/runtime"; +import type { FrontendWorkspaceMetadata } from "../../src/types/workspace"; + +const execAsync = promisify(exec); + +// Test constants +const TEST_TIMEOUT_MS = 60000; +const INIT_HOOK_WAIT_MS = 1500; // Wait for async init hook completion (local runtime) +const SSH_INIT_WAIT_MS = 7000; // SSH init includes sync + checkout + hook, takes longer +const CMUX_DIR = ".cmux"; +const INIT_HOOK_FILENAME = "init"; + +// Event type constants +const EVENT_PREFIX_WORKSPACE_CHAT = "workspace:chat:"; +const EVENT_TYPE_PREFIX_INIT = "init-"; +const EVENT_TYPE_INIT_OUTPUT = "init-output"; +const EVENT_TYPE_INIT_END = "init-end"; // Skip all tests if TEST_INTEGRATION is not set const describeIntegration = shouldRunIntegrationTests() ? 
describe : describe.skip; -describeIntegration("IpcMain create workspace integration tests", () => { - test.concurrent( - "should fail to create workspace with invalid name", - async () => { - const env = await createTestEnvironment(); - const tempGitRepo = await createTempGitRepo(); - - try { - // Test various invalid names - const invalidNames = [ - { name: "", expectedError: "empty" }, - { name: "My-Branch", expectedError: "lowercase" }, - { name: "branch name", expectedError: "lowercase" }, - { name: "branch@123", expectedError: "lowercase" }, - { name: "branch/test", expectedError: "lowercase" }, - { name: "branch\\test", expectedError: "lowercase" }, - { name: "branch.test", expectedError: "lowercase" }, - { name: "a".repeat(65), expectedError: "64 characters" }, - ]; - - const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); - - for (const { name, expectedError } of invalidNames) { - const createResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_CREATE, - tempGitRepo, - name, - trunkBranch - ); - expect(createResult.success).toBe(false); - expect(createResult.error.toLowerCase()).toContain(expectedError.toLowerCase()); - } - } finally { - await cleanupTestEnvironment(env); - await cleanupTempGitRepo(tempGitRepo); - } - }, - 15000 +// SSH server config (shared across all SSH tests) +let sshConfig: SSHServerConfig | undefined; + +// ============================================================================ +// Test Helpers +// ============================================================================ + +/** + * Type guard to check if an event is an init event with a type field + */ +function isInitEvent(data: unknown): data is { type: string } { + return ( + data !== null && + typeof data === "object" && + "type" in data && + typeof (data as { type: unknown }).type === "string" && + (data as { type: string }).type.startsWith(EVENT_TYPE_PREFIX_INIT) + ); +} + +/** + * Filter events by type + */ +function filterEventsByType( + 
events: Array<{ channel: string; data: unknown }>, + eventType: string +): Array<{ channel: string; data: { type: string } }> { + return events.filter((e) => isInitEvent(e.data) && e.data.type === eventType) as Array<{ + channel: string; + data: { type: string }; + }>; +} + +/** + * Set up event capture for init events on workspace chat channel + * Returns array that will be populated with captured events + */ +function setupInitEventCapture(env: TestEnvironment): Array<{ channel: string; data: unknown }> { + const capturedEvents: Array<{ channel: string; data: unknown }> = []; + const originalSend = env.mockWindow.webContents.send; + + env.mockWindow.webContents.send = ((channel: string, data: unknown) => { + if (channel.startsWith(EVENT_PREFIX_WORKSPACE_CHAT) && isInitEvent(data)) { + capturedEvents.push({ channel, data }); + } + originalSend.call(env.mockWindow.webContents, channel, data); + }) as typeof originalSend; + + return capturedEvents; +} + +/** + * Create init hook file in git repo + */ +async function createInitHook(repoPath: string, hookContent: string): Promise { + const cmuxDir = path.join(repoPath, CMUX_DIR); + await fs.mkdir(cmuxDir, { recursive: true }); + const initHookPath = path.join(cmuxDir, INIT_HOOK_FILENAME); + await fs.writeFile(initHookPath, hookContent, { mode: 0o755 }); +} + +/** + * Commit changes in git repo + */ +async function commitChanges(repoPath: string, message: string): Promise { + await execAsync(`git add -A && git commit -m "${message}"`, { + cwd: repoPath, + }); +} + +/** + * Create workspace and handle cleanup on test failure + * Returns result and cleanup function + */ +async function createWorkspaceWithCleanup( + env: TestEnvironment, + projectPath: string, + branchName: string, + trunkBranch: string, + runtimeConfig?: RuntimeConfig +): Promise<{ + result: + | { success: true; metadata: FrontendWorkspaceMetadata } + | { success: false; error: string }; + cleanup: () => Promise; +}> { + const result = await 
env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_CREATE, + projectPath, + branchName, + trunkBranch, + runtimeConfig ); - test.concurrent( - "should successfully create workspace with valid name", - async () => { - const env = await createTestEnvironment(); - const tempGitRepo = await createTempGitRepo(); - - try { - // Test various valid names (avoid "main" as it's already checked out in the repo) - const validNames = [ - "feature-branch", - "feature_branch", - "branch123", - "test-branch_123", - "x", // Single character - "b".repeat(64), // Max length - ]; - - const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); - - for (const name of validNames) { - const createResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_CREATE, - tempGitRepo, - name, - trunkBranch + const cleanup = async () => { + if (result.success) { + await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, result.metadata.id); + } + }; + + return { result, cleanup }; +} + +describeIntegration("WORKSPACE_CREATE with both runtimes", () => { + beforeAll(async () => { + // Check if Docker is available (required for SSH tests) + if (!(await isDockerAvailable())) { + throw new Error( + "Docker is required for SSH runtime tests. Please install Docker or skip tests by unsetting TEST_INTEGRATION." 
+ ); + } + + // Start SSH server (shared across all tests for speed) + console.log("Starting SSH server container for createWorkspace tests..."); + sshConfig = await startSSHServer(); + console.log(`SSH server ready on port ${sshConfig.port}`); + }, 60000); // 60s timeout for Docker operations + + afterAll(async () => { + if (sshConfig) { + console.log("Stopping SSH server container..."); + await stopSSHServer(sshConfig); + } + }, 30000); + + // Test matrix: Run tests for both local and SSH runtimes + describe.each<{ type: "local" | "ssh" }>([{ type: "local" }, { type: "ssh" }])( + "Runtime: $type", + ({ type }) => { + // Helper to build runtime config + const getRuntimeConfig = (branchName: string): RuntimeConfig | undefined => { + if (type === "ssh" && sshConfig) { + return { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: sshConfig.workdir, + identityFile: sshConfig.privateKeyPath, + port: sshConfig.port, + }; + } + return undefined; // undefined = defaults to local + }; + + // Get runtime-specific init wait time (SSH needs more time for rsync) + const getInitWaitTime = () => (type === "ssh" ? 
SSH_INIT_WAIT_MS : INIT_HOOK_WAIT_MS); + + describe("Branch handling", () => { + test.concurrent( + "creates new branch from trunk when branch doesn't exist", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("new-branch"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error( + `Failed to create workspace for new branch '${branchName}': ${result.error}` + ); + } + + // Verify workspace metadata + expect(result.metadata.id).toBeDefined(); + expect(result.metadata.namedWorkspacePath).toBeDefined(); + expect(result.metadata.projectName).toBeDefined(); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "checks out existing branch when branch already exists", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Use existing "test-branch" created by createTempGitRepo + const branchName = "test-branch"; + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error( + `Failed to check out existing branch '${branchName}': ${result.error}` + ); + } + + expect(result.metadata.id).toBeDefined(); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + 
TEST_TIMEOUT_MS + ); + }); + + describe("Init hook execution", () => { + test.concurrent( + "executes .cmux/init hook when present and streams logs", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Create and commit init hook + await createInitHook( + tempGitRepo, + `#!/bin/bash +echo "Init hook started" +echo "Installing dependencies..." +sleep 0.1 +echo "Build complete" >&2 +exit 0 +` + ); + await commitChanges(tempGitRepo, "Add init hook"); + + const branchName = generateBranchName("hook-test"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + // Capture init events + const initEvents = setupInitEventCapture(env); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error(`Failed to create workspace with init hook: ${result.error}`); + } + + // Wait for init hook to complete (runs asynchronously after workspace creation) + await new Promise((resolve) => setTimeout(resolve, getInitWaitTime())); + + // Verify init events were emitted + expect(initEvents.length).toBeGreaterThan(0); + + // Verify output events (stdout/stderr from hook) + const outputEvents = filterEventsByType(initEvents, EVENT_TYPE_INIT_OUTPUT); + expect(outputEvents.length).toBeGreaterThan(0); + + // Verify completion event + const endEvents = filterEventsByType(initEvents, EVENT_TYPE_INIT_END); + expect(endEvents.length).toBe(1); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "handles init hook failure gracefully", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Create and commit failing init 
hook + await createInitHook( + tempGitRepo, + `#!/bin/bash +echo "Starting init..." +echo "Error occurred!" >&2 +exit 1 +` + ); + await commitChanges(tempGitRepo, "Add failing hook"); + + const branchName = generateBranchName("fail-hook"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + // Capture init events + const initEvents = setupInitEventCapture(env); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + // Workspace creation should succeed even if hook fails + expect(result.success).toBe(true); + if (!result.success) { + throw new Error(`Failed to create workspace with failing hook: ${result.error}`); + } + + // Wait for init hook to complete asynchronously + await new Promise((resolve) => setTimeout(resolve, getInitWaitTime())); + + // Verify init-end event with non-zero exit code + const endEvents = filterEventsByType(initEvents, EVENT_TYPE_INIT_END); + expect(endEvents.length).toBe(1); + + const endEventData = endEvents[0].data as { type: string; exitCode: number }; + expect(endEventData.exitCode).not.toBe(0); + // Exit code can be 1 (script failure) or 127 (command not found on some systems) + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "completes successfully when no init hook present", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("no-hook"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { 
+ throw new Error(`Failed to create workspace without init hook: ${result.error}`); + } + + expect(result.metadata.id).toBeDefined(); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + // SSH-specific test: verify sync output appears in init stream + if (type === "ssh") { + test.concurrent( + "streams sync progress to init events (SSH only)", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("sync-test"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + // Capture init events + const initEvents = setupInitEventCapture(env); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error(`Failed to create workspace for sync test: ${result.error}`); + } + + // Wait for init to complete (includes sync + checkout) + await new Promise((resolve) => setTimeout(resolve, getInitWaitTime())); + + // Verify init events contain sync and checkout steps + const outputEvents = filterEventsByType(initEvents, EVENT_TYPE_INIT_OUTPUT); + const outputLines = outputEvents.map((e) => { + const data = e.data as { line?: string; isError?: boolean }; + return data.line ?? ""; + }); + + // Debug: Print all output including errors + console.log("=== ALL INIT OUTPUT ==="); + outputEvents.forEach((e) => { + const data = e.data as { line?: string; isError?: boolean }; + const prefix = data.isError ? "[ERROR]" : "[INFO] "; + console.log(prefix + (data.line ?? 
"")); + }); + console.log("=== END INIT OUTPUT ==="); + + // Verify key init phases appear in output + expect(outputLines.some((line) => line.includes("Syncing project files"))).toBe( + true + ); + expect(outputLines.some((line) => line.includes("Checking out branch"))).toBe(true); + + // Verify init-end event was emitted + const endEvents = filterEventsByType(initEvents, EVENT_TYPE_INIT_END); + expect(endEvents.length).toBe(1); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "handles tilde (~/) paths correctly (SSH only)", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("tilde-test"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + + // Use ~/workspace/... path instead of absolute path + const tildeRuntimeConfig: RuntimeConfig = { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: `~/workspace`, + identityFile: sshConfig!.privateKeyPath, + port: sshConfig!.port, + }; + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + tildeRuntimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error(`Failed to create workspace with tilde path: ${result.error}`); + } + + // Wait for init to complete + await new Promise((resolve) => setTimeout(resolve, getInitWaitTime())); + + // Verify workspace exists + expect(result.metadata.id).toBeDefined(); + expect(result.metadata.namedWorkspacePath).toBeDefined(); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "handles tilde paths with init hooks (SSH only)", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await 
createTempGitRepo(); + + try { + // Add init hook to repo + await createInitHook( + tempGitRepo, + `#!/bin/bash +echo "Init hook executed with tilde path" +` + ); + await commitChanges(tempGitRepo, "Add init hook for tilde test"); + + const branchName = generateBranchName("tilde-init-test"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + + // Use ~/workspace/... path instead of absolute path + const tildeRuntimeConfig: RuntimeConfig = { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: `~/workspace`, + identityFile: sshConfig!.privateKeyPath, + port: sshConfig!.port, + }; + + // Capture init events to verify hook output + const initEvents = setupInitEventCapture(env); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + tildeRuntimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error( + `Failed to create workspace with tilde path + init hook: ${result.error}` + ); + } + + // Wait for init to complete (including hook) + await new Promise((resolve) => setTimeout(resolve, getInitWaitTime())); + + // Verify init hook was executed + const outputEvents = filterEventsByType(initEvents, EVENT_TYPE_INIT_OUTPUT); + const outputLines = outputEvents.map((e) => { + const data = e.data as { line?: string }; + return data.line ?? ""; + }); + + // Debug: Print all output including errors + console.log("=== TILDE INIT HOOK OUTPUT ==="); + outputEvents.forEach((e) => { + const data = e.data as { line?: string; isError?: boolean }; + const prefix = data.isError ? "[ERROR]" : "[INFO] "; + console.log(prefix + (data.line ?? 
"")); + }); + console.log("=== END TILDE INIT HOOK OUTPUT ==="); + + expect(outputLines.some((line) => line.includes("Running init hook"))).toBe(true); + expect(outputLines.some((line) => line.includes("Init hook executed"))).toBe(true); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "can execute commands in workspace immediately after creation (SSH only)", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("exec-test"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error(`Failed to create workspace: ${result.error}`); + } + + // Wait for init to complete + await new Promise((resolve) => setTimeout(resolve, getInitWaitTime())); + + // Try to execute a command in the workspace + const workspaceId = result.metadata.id; + const execResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_EXECUTE_BASH, + workspaceId, + "pwd" + ); + + expect(execResult.success).toBe(true); + if (!execResult.success) { + throw new Error(`Failed to exec in workspace: ${execResult.error}`); + } + + // Verify we got output from the command + expect(execResult.data).toBeDefined(); + expect(execResult.data.output).toBeDefined(); + expect(execResult.data.output!.trim().length).toBeGreaterThan(0); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS ); - if (!createResult.success) { - console.error(`Failed to create workspace "${name}":`, createResult.error); - } - 
expect(createResult.success).toBe(true); - expect(createResult.metadata.id).toBeDefined(); - expect(createResult.metadata.namedWorkspacePath).toBeDefined(); - expect(createResult.metadata.namedWorkspacePath).toBeDefined(); - expect(createResult.metadata.projectName).toBeDefined(); - - // Clean up the workspace - if (createResult.metadata.id) { - await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - createResult.metadata.id - ); - } } - } finally { - await cleanupTestEnvironment(env); - await cleanupTempGitRepo(tempGitRepo); - } - }, - 30000 + }); + + describe("Validation", () => { + test.concurrent( + "rejects invalid workspace names", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const invalidCases = [ + { name: "", expectedErrorFragment: "empty" }, + { name: "My-Branch", expectedErrorFragment: "lowercase" }, + { name: "branch name", expectedErrorFragment: "lowercase" }, + { name: "branch@123", expectedErrorFragment: "lowercase" }, + { name: "a".repeat(65), expectedErrorFragment: "64 characters" }, + ]; + + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + + for (const { name, expectedErrorFragment } of invalidCases) { + const runtimeConfig = getRuntimeConfig(name); + const { result } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + name, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(false); + + if (!result.success) { + expect(result.error.toLowerCase()).toContain(expectedErrorFragment.toLowerCase()); + } + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + }); + } ); }); diff --git a/tests/ipcMain/executeBash.test.ts b/tests/ipcMain/executeBash.test.ts index bfbc9b3164..d551197372 100644 --- a/tests/ipcMain/executeBash.test.ts +++ b/tests/ipcMain/executeBash.test.ts @@ -151,7 +151,7 @@ describeIntegration("IpcMain executeBash integration tests", () 
=> { expect(timeoutResult.success).toBe(true); expect(timeoutResult.data.success).toBe(false); - expect(timeoutResult.data.error).toContain("timed out"); + expect(timeoutResult.data.error).toContain("timeout"); // Clean up await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, workspaceId); diff --git a/tests/ipcMain/forkWorkspace.test.ts b/tests/ipcMain/forkWorkspace.test.ts index 06529fd322..f804d3335a 100644 --- a/tests/ipcMain/forkWorkspace.test.ts +++ b/tests/ipcMain/forkWorkspace.test.ts @@ -33,6 +33,13 @@ describeIntegration("IpcMain fork workspace integration tests", () => { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 30000); // 30s timeout for tokenizer loading + test.concurrent( "should fail to fork workspace with invalid name", async () => { diff --git a/tests/ipcMain/helpers.ts b/tests/ipcMain/helpers.ts index 475a7d8d4c..f0333ba5b2 100644 --- a/tests/ipcMain/helpers.ts +++ b/tests/ipcMain/helpers.ts @@ -66,7 +66,8 @@ export async function createWorkspace( mockIpcRenderer: IpcRenderer, projectPath: string, branchName: string, - trunkBranch?: string + trunkBranch?: string, + runtimeConfig?: import("../../src/types/runtime").RuntimeConfig ): Promise< { success: true; metadata: WorkspaceMetadataWithPaths } | { success: false; error: string } > { @@ -79,7 +80,8 @@ export async function createWorkspace( IPC_CHANNELS.WORKSPACE_CREATE, projectPath, branchName, - resolvedTrunk + resolvedTrunk, + runtimeConfig )) as { success: true; metadata: WorkspaceMetadataWithPaths } | { success: false; error: string }; } @@ -146,24 +148,102 @@ export class EventCollector { pollInterval = Math.min(pollInterval * 1.5, 500); } - // Log diagnostic info on timeout - 
const eventTypes = this.events - .filter((e) => "type" in e) - .map((e) => (e as { type: string }).type); - console.warn( - `waitForEvent timeout: Expected "${eventType}" but got events: [${eventTypes.join(", ")}]` - ); + // Timeout - log detailed diagnostic info + this.logEventDiagnostics(`waitForEvent timeout: Expected "${eventType}"`); - // If there was a stream-error, log the error details - const errorEvent = this.events.find((e) => "type" in e && e.type === "stream-error"); - if (errorEvent && "error" in errorEvent) { - console.error("Stream error details:", errorEvent.error); - if ("errorType" in errorEvent) { - console.error("Stream error type:", errorEvent.errorType); + return null; + } + + /** + * Log detailed event diagnostics for debugging + * Includes timestamps, event types, tool calls, and error details + */ + logEventDiagnostics(context: string): void { + console.error(`\n${"=".repeat(80)}`); + console.error(`EVENT DIAGNOSTICS: ${context}`); + console.error(`${"=".repeat(80)}`); + console.error(`Workspace: ${this.workspaceId}`); + console.error(`Total events: ${this.events.length}`); + console.error(`\nEvent sequence:`); + + // Log all events with details + this.events.forEach((event, idx) => { + const timestamp = + "timestamp" in event ? new Date(event.timestamp as number).toISOString() : "no-ts"; + const type = "type" in event ? (event as { type: string }).type : "no-type"; + + console.error(` [${idx}] ${timestamp} - ${type}`); + + // Log tool call details + if (type === "tool-call-start" && "toolName" in event) { + console.error(` Tool: ${event.toolName}`); + if ("args" in event) { + console.error(` Args: ${JSON.stringify(event.args)}`); + } } - } - return null; + if (type === "tool-call-end" && "toolName" in event) { + console.error(` Tool: ${event.toolName}`); + if ("result" in event) { + const result = + typeof event.result === "string" + ? event.result.length > 100 + ? `${event.result.substring(0, 100)}... 
(${event.result.length} chars)` + : event.result + : JSON.stringify(event.result); + console.error(` Result: ${result}`); + } + } + + // Log error details + if (type === "stream-error") { + if ("error" in event) { + console.error(` Error: ${event.error}`); + } + if ("errorType" in event) { + console.error(` Error Type: ${event.errorType}`); + } + } + + // Log delta content (first 100 chars) + if (type === "stream-delta" && "delta" in event) { + const delta = + typeof event.delta === "string" + ? event.delta.length > 100 + ? `${event.delta.substring(0, 100)}...` + : event.delta + : JSON.stringify(event.delta); + console.error(` Delta: ${delta}`); + } + + // Log final content (first 200 chars) + if (type === "stream-end" && "content" in event) { + const content = + typeof event.content === "string" + ? event.content.length > 200 + ? `${event.content.substring(0, 200)}... (${event.content.length} chars)` + : event.content + : JSON.stringify(event.content); + console.error(` Content: ${content}`); + } + }); + + // Summary + const eventTypeCounts = this.events.reduce( + (acc, e) => { + const type = "type" in e ? (e as { type: string }).type : "unknown"; + acc[type] = (acc[type] || 0) + 1; + return acc; + }, + {} as Record + ); + + console.error(`\nEvent type counts:`); + Object.entries(eventTypeCounts).forEach(([type, count]) => { + console.error(` ${type}: ${count}`); + }); + + console.error(`${"=".repeat(80)}\n`); } /** @@ -211,19 +291,22 @@ export function createEventCollector( */ export function assertStreamSuccess(collector: EventCollector): void { const allEvents = collector.getEvents(); - const eventTypes = allEvents.filter((e) => "type" in e).map((e) => (e as { type: string }).type); // Check for stream-end if (!collector.hasStreamEnd()) { const errorEvent = allEvents.find((e) => "type" in e && e.type === "stream-error"); if (errorEvent && "error" in errorEvent) { + collector.logEventDiagnostics( + `Stream did not complete successfully. 
Got stream-error: ${errorEvent.error}` + ); throw new Error( `Stream did not complete successfully. Got stream-error: ${errorEvent.error}\n` + - `All events: [${eventTypes.join(", ")}]` + `See detailed event diagnostics above.` ); } + collector.logEventDiagnostics("Stream did not emit stream-end event"); throw new Error( - `Stream did not emit stream-end event.\n` + `All events: [${eventTypes.join(", ")}]` + `Stream did not emit stream-end event.\n` + `See detailed event diagnostics above.` ); } @@ -231,17 +314,19 @@ export function assertStreamSuccess(collector: EventCollector): void { if (collector.hasError()) { const errorEvent = allEvents.find((e) => "type" in e && e.type === "stream-error"); const errorMsg = errorEvent && "error" in errorEvent ? errorEvent.error : "unknown"; + collector.logEventDiagnostics(`Stream completed but also has error event: ${errorMsg}`); throw new Error( `Stream completed but also has error event: ${errorMsg}\n` + - `All events: [${eventTypes.join(", ")}]` + `See detailed event diagnostics above.` ); } // Check for final message const finalMessage = collector.getFinalMessage(); if (!finalMessage) { + collector.logEventDiagnostics("Stream completed but final message is missing"); throw new Error( - `Stream completed but final message is missing.\n` + `All events: [${eventTypes.join(", ")}]` + `Stream completed but final message is missing.\n` + `See detailed event diagnostics above.` ); } } @@ -299,6 +384,60 @@ export async function waitForFileExists(filePath: string, timeoutMs = 5000): Pro }, timeoutMs); } +/** + * Wait for init hook to complete by watching for init-end event + * More reliable than static sleeps + * Based on workspaceInitHook.test.ts pattern + */ +export async function waitForInitComplete( + env: import("./setup").TestEnvironment, + workspaceId: string, + timeoutMs = 5000 +): Promise { + const startTime = Date.now(); + let pollInterval = 50; + + while (Date.now() - startTime < timeoutMs) { + // Check for init-end 
event in sentEvents + const initEndEvent = env.sentEvents.find( + (e) => + e.channel === getChatChannel(workspaceId) && + typeof e.data === "object" && + e.data !== null && + "type" in e.data && + e.data.type === "init-end" + ); + + if (initEndEvent) { + // Check if init succeeded (exitCode === 0) + const exitCode = (initEndEvent.data as any).exitCode; + if (exitCode !== 0) { + // Collect all init output for debugging + const initOutputEvents = env.sentEvents.filter( + (e) => + e.channel === getChatChannel(workspaceId) && + typeof e.data === "object" && + e.data !== null && + "type" in e.data && + (e.data as any).type === "init-output" + ); + const output = initOutputEvents + .map((e) => (e.data as any).line) + .filter(Boolean) + .join("\n"); + throw new Error(`Init hook failed with exit code ${exitCode}:\n${output}`); + } + return; + } + + await new Promise((resolve) => setTimeout(resolve, pollInterval)); + pollInterval = Math.min(pollInterval * 1.5, 500); + } + + // Throw error on timeout - workspace creation must complete for tests to be valid + throw new Error(`Init did not complete within ${timeoutMs}ms - workspace may not be ready`); +} + /** * Wait for stream to complete successfully * Common pattern: create collector, wait for end, assert success diff --git a/tests/ipcMain/openai-web-search.test.ts b/tests/ipcMain/openai-web-search.test.ts index ba4a03f068..2670d16871 100644 --- a/tests/ipcMain/openai-web-search.test.ts +++ b/tests/ipcMain/openai-web-search.test.ts @@ -20,6 +20,13 @@ describeIntegration("OpenAI web_search integration tests", () => { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 30000); // 30s timeout for tokenizer loading + 
test.concurrent( "should handle reasoning + web_search without itemId errors", async () => { diff --git a/tests/ipcMain/removeWorkspace.test.ts b/tests/ipcMain/removeWorkspace.test.ts index d6ed1e6aaa..f7ffcf0ecc 100644 --- a/tests/ipcMain/removeWorkspace.test.ts +++ b/tests/ipcMain/removeWorkspace.test.ts @@ -1,275 +1,516 @@ -import { shouldRunIntegrationTests, createTestEnvironment, cleanupTestEnvironment } from "./setup"; +/** + * Integration tests for workspace deletion across Local and SSH runtimes + * + * Tests WORKSPACE_REMOVE IPC handler with both LocalRuntime (git worktrees) + * and SSHRuntime (plain directories), including force flag and submodule handling. + */ + +import * as fs from "fs/promises"; +import * as path from "path"; +import { + createTestEnvironment, + cleanupTestEnvironment, + shouldRunIntegrationTests, + preloadTestModules, + type TestEnvironment, +} from "./setup"; import { IPC_CHANNELS } from "../../src/constants/ipc-constants"; import { createTempGitRepo, cleanupTempGitRepo, - createWorkspace, generateBranchName, - waitForFileNotExists, addSubmodule, + waitForFileNotExists, + waitForInitComplete, } from "./helpers"; -import * as fs from "fs/promises"; +import { detectDefaultTrunkBranch } from "../../src/git"; +import { + isDockerAvailable, + startSSHServer, + stopSSHServer, + type SSHServerConfig, +} from "../runtime/ssh-fixture"; +import type { RuntimeConfig } from "../../src/types/runtime"; +import { execAsync } from "../../src/utils/disposableExec"; + +// Test constants +const TEST_TIMEOUT_LOCAL_MS = 20000; +const TEST_TIMEOUT_SSH_MS = 45000; +const INIT_HOOK_WAIT_MS = 1500; +const SSH_INIT_WAIT_MS = 7000; // Skip all tests if TEST_INTEGRATION is not set const describeIntegration = shouldRunIntegrationTests() ? 
describe : describe.skip; -describeIntegration("IpcMain remove workspace integration tests", () => { - test.concurrent( - "should successfully remove workspace and git worktree", - async () => { - const env = await createTestEnvironment(); - const tempGitRepo = await createTempGitRepo(); - - try { - const branchName = generateBranchName("remove-test"); - - // Create a workspace - const createResult = await createWorkspace(env.mockIpcRenderer, tempGitRepo, branchName); - expect(createResult.success).toBe(true); - if (!createResult.success) { - throw new Error("Failed to create workspace"); - } - - const { metadata } = createResult; - const workspacePath = metadata.namedWorkspacePath; - - // Verify the worktree exists - const worktreeExistsBefore = await fs - .access(workspacePath) - .then(() => true) - .catch(() => false); - expect(worktreeExistsBefore).toBe(true); - - // Get the worktree directory path before removing - const projectName = tempGitRepo.split("/").pop() || "unknown"; - const worktreeDirPath = `${env.config.srcDir}/${projectName}/${metadata.name}`; - const worktreeDirExistsBefore = await fs - .lstat(worktreeDirPath) - .then(() => true) - .catch(() => false); - expect(worktreeDirExistsBefore).toBe(true); - - // Remove the workspace - const removeResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - metadata.id - ); - expect(removeResult.success).toBe(true); - - // Verify the worktree no longer exists - const worktreeRemoved = await waitForFileNotExists(workspacePath, 5000); - expect(worktreeRemoved).toBe(true); - - // Verify worktree directory is removed - const worktreeDirExistsAfter = await fs - .lstat(worktreeDirPath) - .then(() => true) - .catch(() => false); - expect(worktreeDirExistsAfter).toBe(false); - - // Verify workspace is no longer in config - const config = env.config.loadConfigOrDefault(); - const project = config.projects.get(tempGitRepo); - if (project) { - const workspaceStillInConfig = 
project.workspaces.some((w) => w.path === workspacePath); - expect(workspaceStillInConfig).toBe(false); - } - } finally { - await cleanupTestEnvironment(env); - await cleanupTempGitRepo(tempGitRepo); - } - }, - 15000 +// SSH server config (shared across all SSH tests) +let sshConfig: SSHServerConfig | undefined; + +// ============================================================================ +// Test Helpers +// ============================================================================ + +/** + * Create workspace helper and wait for init hook to complete + */ +async function createWorkspaceHelper( + env: TestEnvironment, + projectPath: string, + branchName: string, + runtimeConfig?: RuntimeConfig, + isSSH: boolean = false +): Promise<{ + workspaceId: string; + workspacePath: string; + cleanup: () => Promise; +}> { + const trunkBranch = await detectDefaultTrunkBranch(projectPath); + console.log( + `[createWorkspaceHelper] Creating workspace with trunk=${trunkBranch}, branch=${branchName}` ); - - test.concurrent( - "should handle removal of non-existent workspace gracefully", - async () => { - const env = await createTestEnvironment(); - - try { - // Try to remove a workspace that doesn't exist - const removeResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - "non-existent-workspace-id" - ); - - // Should succeed (idempotent operation) - expect(removeResult.success).toBe(true); - } finally { - await cleanupTestEnvironment(env); - } - }, - 15000 + const result = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_CREATE, + projectPath, + branchName, + trunkBranch, + runtimeConfig ); - test.concurrent( - "should handle removal when worktree directory is already deleted", - async () => { - const env = await createTestEnvironment(); - const tempGitRepo = await createTempGitRepo(); - - try { - const branchName = generateBranchName("remove-deleted"); - - // Create a workspace - const createResult = await 
createWorkspace(env.mockIpcRenderer, tempGitRepo, branchName); - expect(createResult.success).toBe(true); - if (!createResult.success) { - throw new Error("Failed to create workspace"); - } - - const { metadata } = createResult; - const workspacePath = metadata.namedWorkspacePath; - - // Manually delete the worktree directory (simulating external deletion) - await fs.rm(workspacePath, { recursive: true, force: true }); - - // Verify it's gone - const worktreeExists = await fs - .access(workspacePath) - .then(() => true) - .catch(() => false); - expect(worktreeExists).toBe(false); - - // Remove the workspace via IPC - should succeed and prune stale worktree - const removeResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - metadata.id - ); - expect(removeResult.success).toBe(true); - - // Verify workspace is no longer in config - const config = env.config.loadConfigOrDefault(); - const project = config.projects.get(tempGitRepo); - if (project) { - const workspaceStillInConfig = project.workspaces.some((w) => w.path === workspacePath); - expect(workspaceStillInConfig).toBe(false); - } - } finally { - await cleanupTestEnvironment(env); - await cleanupTempGitRepo(tempGitRepo); - } - }, - 15000 + if (!result.success) { + throw new Error(`Failed to create workspace: ${result.error}`); + } + + const workspaceId = result.metadata.id; + const workspacePath = result.metadata.namedWorkspacePath; + + // Wait for init hook to complete in real-time + await waitForInitComplete(env, workspaceId, isSSH ? 
SSH_INIT_WAIT_MS : INIT_HOOK_WAIT_MS); + + const cleanup = async () => { + await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, workspaceId); + }; + + return { workspaceId, workspacePath, cleanup }; +} + +/** + * Execute bash command in workspace context (works for both local and SSH) + */ +async function executeBash( + env: TestEnvironment, + workspaceId: string, + command: string +): Promise<{ output: string; exitCode: number }> { + const result = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_EXECUTE_BASH, + workspaceId, + command ); - test.concurrent( - "should successfully remove clean workspace with submodule", - async () => { - const env = await createTestEnvironment(); - const tempGitRepo = await createTempGitRepo(); - - try { - // Add a real submodule (leftpad) to the main repo - await addSubmodule(tempGitRepo); - - const branchName = generateBranchName("remove-submodule-clean"); - - // Create a workspace with the repo that has a submodule - const createResult = await createWorkspace(env.mockIpcRenderer, tempGitRepo, branchName); - expect(createResult.success).toBe(true); - if (!createResult.success) { - throw new Error("Failed to create workspace"); - } - - const { metadata } = createResult; - const workspacePath = metadata.namedWorkspacePath; - - // Initialize submodule in the worktree - const { exec } = await import("child_process"); - const { promisify } = await import("util"); - const execAsync = promisify(exec); - await execAsync("git submodule update --init", { cwd: workspacePath }); - - // Verify submodule is initialized - const submodulePath = await fs - .access(`${workspacePath}/vendor/left-pad`) - .then(() => true) - .catch(() => false); - expect(submodulePath).toBe(true); - - // Worktree is clean (no uncommitted changes) - // Should succeed via rename strategy (bypasses git worktree remove) - const removeResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - metadata.id - ); - 
expect(removeResult.success).toBe(true); - - // Verify the worktree no longer exists - const worktreeRemoved = await waitForFileNotExists(workspacePath, 5000); - expect(worktreeRemoved).toBe(true); - } finally { - await cleanupTestEnvironment(env); - await cleanupTempGitRepo(tempGitRepo); - } - }, - 30000 + if (!result.success) { + throw new Error(`Bash execution failed: ${result.error}`); + } + + // Result is wrapped in Ok(), so data is the BashToolResult + const bashResult = result.data; + return { output: bashResult.output, exitCode: bashResult.exitCode }; +} + +/** + * Check if workspace directory exists (runtime-agnostic) + * This verifies the workspace root directory exists + */ +async function workspaceExists(env: TestEnvironment, workspaceId: string): Promise { + try { + // Try to execute a simple command in the workspace + // If workspace doesn't exist, this will fail + const result = await executeBash(env, workspaceId, `pwd`); + return result.exitCode === 0; + } catch { + return false; + } +} + +/** + * Make workspace dirty by modifying a tracked file (runtime-agnostic) + */ +async function makeWorkspaceDirty(env: TestEnvironment, workspaceId: string): Promise { + // Modify an existing tracked file (README.md exists in test repos) + // This ensures git will detect uncommitted changes + await executeBash( + env, + workspaceId, + 'echo "test modification to make workspace dirty" >> README.md' ); - - test.concurrent( - "should fail to remove dirty workspace with submodule, succeed with force", - async () => { - const env = await createTestEnvironment(); - const tempGitRepo = await createTempGitRepo(); - - try { - // Add a real submodule (leftpad) to the main repo - await addSubmodule(tempGitRepo); - - const branchName = generateBranchName("remove-submodule-dirty"); - - // Create a workspace with the repo that has a submodule - const createResult = await createWorkspace(env.mockIpcRenderer, tempGitRepo, branchName); - expect(createResult.success).toBe(true); 
- if (!createResult.success) { - throw new Error("Failed to create workspace"); +} + +// ============================================================================ +// Test Suite +// ============================================================================ + +describeIntegration("Workspace deletion integration tests", () => { + beforeAll(async () => { + await preloadTestModules(); + + // Check if Docker is available (required for SSH tests) + if (!(await isDockerAvailable())) { + throw new Error( + "Docker is required for SSH runtime tests. Please install Docker or skip tests by unsetting TEST_INTEGRATION." + ); + } + + // Start SSH server (shared across all tests for speed) + console.log("Starting SSH server container for deletion tests..."); + sshConfig = await startSSHServer(); + console.log(`SSH server ready on port ${sshConfig.port}`); + }, 60000); + + afterAll(async () => { + if (sshConfig) { + console.log("Stopping SSH server container..."); + await stopSSHServer(sshConfig); + } + }, 30000); + + // Test matrix: Run tests for both local and SSH runtimes + describe.each<{ type: "local" | "ssh" }>([{ type: "local" }, { type: "ssh" }])( + "Runtime: $type", + ({ type }) => { + const TEST_TIMEOUT = type === "ssh" ? 
TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS; + + // Helper to build runtime config + const getRuntimeConfig = (_branchName: string): RuntimeConfig | undefined => { + if (type === "ssh" && sshConfig) { + return { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: sshConfig.workdir, // Base workdir, not including branch name + identityFile: sshConfig.privateKeyPath, + port: sshConfig.port, + }; } - - const { metadata } = createResult; - const workspacePath = metadata.namedWorkspacePath; - - // Initialize submodule in the worktree - const { exec } = await import("child_process"); - const { promisify } = await import("util"); - const execAsync = promisify(exec); - await execAsync("git submodule update --init", { cwd: workspacePath }); - - // Make worktree "dirty" to prevent the rename optimization - await fs.appendFile(`${workspacePath}/README.md`, "\\nmodified"); - - // First attempt should fail (dirty worktree with submodules) - const removeResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - metadata.id + return undefined; // undefined = defaults to local + }; + + test.concurrent( + "should successfully delete workspace", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("delete-test"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, workspacePath } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + // Verify workspace exists (works for both local and SSH) + const existsBefore = await workspaceExists(env, workspaceId); + if (!existsBefore) { + console.error(`Workspace ${workspaceId} does not exist after creation`); + console.error(`workspacePath from metadata: ${workspacePath}`); + } + expect(existsBefore).toBe(true); + + // Delete the workspace + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + 
workspaceId + ); + + if (!deleteResult.success) { + console.error("Delete failed:", deleteResult.error); + } + expect(deleteResult.success).toBe(true); + + // Verify workspace is no longer in config + const config = env.config.loadConfigOrDefault(); + const project = config.projects.get(tempGitRepo); + if (project) { + const stillInConfig = project.workspaces.some((w) => w.id === workspaceId); + expect(stillInConfig).toBe(false); + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT + ); + + test.concurrent( + "should handle deletion of non-existent workspace gracefully", + async () => { + const env = await createTestEnvironment(); + + try { + // Try to delete a workspace that doesn't exist + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + "non-existent-workspace-id" + ); + + // Should succeed (idempotent operation) + expect(deleteResult.success).toBe(true); + } finally { + await cleanupTestEnvironment(env); + } + }, + TEST_TIMEOUT + ); + + test.concurrent( + "should handle deletion when directory is already deleted", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("already-deleted"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, workspacePath } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + // Manually delete the workspace directory using bash (works for both local and SSH) + await executeBash(env, workspaceId, 'cd .. 
&& rm -rf "$(basename "$PWD")"'); + + // Verify it's gone (note: workspace is deleted, so we can't use executeBash on workspaceId anymore) + // We'll verify via the delete operation and config check + + // Delete via IPC - should succeed and prune stale metadata + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + workspaceId + ); + expect(deleteResult.success).toBe(true); + + // Verify workspace is no longer in config + const config = env.config.loadConfigOrDefault(); + const project = config.projects.get(tempGitRepo); + if (project) { + const stillInConfig = project.workspaces.some((w) => w.id === workspaceId); + expect(stillInConfig).toBe(false); + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT + ); + + test.concurrent( + "should fail to delete dirty workspace without force flag", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("delete-dirty"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + // Make workspace dirty by modifying a file through bash + await makeWorkspaceDirty(env, workspaceId); + + // Attempt to delete without force should fail + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + workspaceId + ); + expect(deleteResult.success).toBe(false); + expect(deleteResult.error).toMatch( + /uncommitted changes|worktree contains modified|contains modified or untracked files/i + ); + + // Verify workspace still exists + const stillExists = await workspaceExists(env, workspaceId); + expect(stillExists).toBe(true); + + // Cleanup: force delete for cleanup + await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, workspaceId, { + force: true, + }); + } finally { 
+ await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT + ); + + test.concurrent( + "should delete dirty workspace with force flag", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("delete-dirty-force"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + // Make workspace dirty through bash + await makeWorkspaceDirty(env, workspaceId); + + // Delete with force should succeed + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + workspaceId, + { force: true } + ); + expect(deleteResult.success).toBe(true); + + // Verify workspace is no longer in config + const config = env.config.loadConfigOrDefault(); + const project = config.projects.get(tempGitRepo); + if (project) { + const stillInConfig = project.workspaces.some((w) => w.id === workspaceId); + expect(stillInConfig).toBe(false); + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT + ); + + // Submodule tests only apply to local runtime (SSH doesn't use git worktrees) + if (type === "local") { + test.concurrent( + "should successfully delete clean workspace with submodule", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Add a real submodule to the main repo + await addSubmodule(tempGitRepo); + + const branchName = generateBranchName("delete-submodule-clean"); + const { workspaceId, workspacePath } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + undefined, + false + ); + + // Initialize submodule in the worktree + using initProc = execAsync(`cd "${workspacePath}" && git submodule update --init`); + await initProc.result; + + 
// Verify submodule is initialized + const submoduleExists = await fs + .access(path.join(workspacePath, "vendor", "left-pad")) + .then(() => true) + .catch(() => false); + expect(submoduleExists).toBe(true); + + // Worktree has submodule - need force flag to delete via rm -rf fallback + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + workspaceId, + { force: true } + ); + if (!deleteResult.success) { + console.error("Delete with submodule failed:", deleteResult.error); + } + expect(deleteResult.success).toBe(true); + + // Verify workspace was deleted + const removed = await waitForFileNotExists(workspacePath, 5000); + expect(removed).toBe(true); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + 30000 ); - expect(removeResult.success).toBe(false); - expect(removeResult.error).toContain("submodule"); - - // Verify worktree still exists - const worktreeStillExists = await fs - .access(workspacePath) - .then(() => true) - .catch(() => false); - expect(worktreeStillExists).toBe(true); - - // Retry with force should succeed - const forceRemoveResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_REMOVE, - metadata.id, - { force: true } + + test.concurrent( + "should fail to delete dirty workspace with submodule, succeed with force", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Add a real submodule to the main repo + await addSubmodule(tempGitRepo); + + const branchName = generateBranchName("delete-submodule-dirty"); + const { workspaceId, workspacePath } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + undefined, + false + ); + + // Initialize submodule in the worktree + using initProc = execAsync(`cd "${workspacePath}" && git submodule update --init`); + await initProc.result; + + // Make worktree dirty + await fs.appendFile(path.join(workspacePath, "README.md"), 
"\nmodified"); + + // First attempt should fail (dirty worktree with submodules) + const deleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + workspaceId + ); + expect(deleteResult.success).toBe(false); + expect(deleteResult.error).toMatch(/submodule/i); + + // Verify worktree still exists + const stillExists = await fs + .access(workspacePath) + .then(() => true) + .catch(() => false); + expect(stillExists).toBe(true); + + // Retry with force should succeed + const forceDeleteResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_REMOVE, + workspaceId, + { force: true } + ); + expect(forceDeleteResult.success).toBe(true); + + // Verify workspace was deleted + const removed = await waitForFileNotExists(workspacePath, 5000); + expect(removed).toBe(true); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + 30000 ); - expect(forceRemoveResult.success).toBe(true); - - // Verify the worktree no longer exists - const worktreeRemoved = await waitForFileNotExists(workspacePath, 5000); - expect(worktreeRemoved).toBe(true); - } finally { - await cleanupTestEnvironment(env); - await cleanupTempGitRepo(tempGitRepo); } - }, - 30000 + } ); }); diff --git a/tests/ipcMain/renameWorkspace.test.ts b/tests/ipcMain/renameWorkspace.test.ts index 8abe6ba75e..bd3416089a 100644 --- a/tests/ipcMain/renameWorkspace.test.ts +++ b/tests/ipcMain/renameWorkspace.test.ts @@ -1,489 +1,293 @@ -import { setupWorkspace, shouldRunIntegrationTests, validateApiKeys } from "./setup"; -import { - sendMessageWithModel, - createEventCollector, - waitForFileExists, - waitForFileNotExists, - createWorkspace, -} from "./helpers"; -import { IPC_CHANNELS } from "../../src/constants/ipc-constants"; -import type { CmuxMessage } from "../../src/types/message"; +/** + * Integration tests for WORKSPACE_RENAME IPC handler + * + * Tests both LocalRuntime and SSHRuntime without mocking to verify: + * - Workspace renaming 
mechanics (git worktree mv, directory mv) + * - Config updates (workspace path, name, stable IDs) + * - Error handling (name conflicts, validation) + * - Parity between runtime implementations + * + * Uses real IPC handlers, real git operations, and Docker SSH server. + */ + import * as fs from "fs/promises"; -import * as fsSync from "fs"; +import * as path from "path"; +import { exec } from "child_process"; +import { promisify } from "util"; +import { shouldRunIntegrationTests, createTestEnvironment, cleanupTestEnvironment } from "./setup"; +import type { TestEnvironment } from "./setup"; +import { IPC_CHANNELS } from "../../src/constants/ipc-constants"; +import { createTempGitRepo, cleanupTempGitRepo, generateBranchName } from "./helpers"; +import { detectDefaultTrunkBranch } from "../../src/git"; +import { + isDockerAvailable, + startSSHServer, + stopSSHServer, + type SSHServerConfig, +} from "../runtime/ssh-fixture"; +import type { RuntimeConfig } from "../../src/types/runtime"; +import type { FrontendWorkspaceMetadata } from "../../src/types/workspace"; +import { waitForInitComplete } from "./helpers"; + +const execAsync = promisify(exec); + +// Test constants +const TEST_TIMEOUT_MS = 60000; +const INIT_HOOK_WAIT_MS = 1500; // Wait for async init hook completion (local runtime) +const SSH_INIT_WAIT_MS = 7000; // SSH init includes sync + checkout + hook, takes longer // Skip all tests if TEST_INTEGRATION is not set const describeIntegration = shouldRunIntegrationTests() ? 
describe : describe.skip; -// Validate API keys before running tests -if (shouldRunIntegrationTests()) { - validateApiKeys(["ANTHROPIC_API_KEY"]); -} - -describeIntegration("IpcMain rename workspace integration tests", () => { - test.concurrent( - "should successfully rename workspace and update all paths", - async () => { - const { env, workspaceId, workspacePath, tempGitRepo, branchName, cleanup } = - await setupWorkspace("anthropic"); - try { - // Add project and workspace to config via IPC - await env.mockIpcRenderer.invoke(IPC_CHANNELS.PROJECT_CREATE, tempGitRepo); - // Manually add workspace to the project (normally done by WORKSPACE_CREATE) - const projectsConfig = env.config.loadConfigOrDefault(); - const projectConfig = projectsConfig.projects.get(tempGitRepo); - if (projectConfig) { - projectConfig.workspaces.push({ - path: workspacePath, - id: workspaceId, - name: branchName, - }); - env.config.saveConfig(projectsConfig); - } - const oldSessionDir = env.config.getSessionDir(workspaceId); - const oldMetadataResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_GET_INFO, - workspaceId - ); - expect(oldMetadataResult).toBeTruthy(); - const oldWorkspacePath = oldMetadataResult.namedWorkspacePath; - - // Clear events before rename - env.sentEvents.length = 0; - - // Rename the workspace - const newName = "renamed-branch"; - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - newName - ); - if (!renameResult.success) { - console.error("Rename failed:", renameResult.error); - } - expect(renameResult.success).toBe(true); - - // Get new workspace ID from backend (NEVER construct it in frontend) - expect(renameResult.data?.newWorkspaceId).toBeDefined(); - const newWorkspaceId = renameResult.data.newWorkspaceId; - const projectName = oldMetadataResult.projectName; // Still need this for assertions - - // With stable IDs, workspace ID should NOT change during rename - 
expect(newWorkspaceId).toBe(workspaceId); - - // Session directory should still be the same (stable IDs don't move directories) - const sessionDir = env.config.getSessionDir(workspaceId); - expect(sessionDir).toBe(oldSessionDir); - - // Verify metadata was updated (name changed, path changed, but ID stays the same) - const newMetadataResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_GET_INFO, - workspaceId // Use same workspace ID - ); - expect(newMetadataResult).toBeTruthy(); - expect(newMetadataResult.id).toBe(workspaceId); // ID unchanged - expect(newMetadataResult.name).toBe(newName); // Name updated - expect(newMetadataResult.projectName).toBe(projectName); - - // Path DOES change (directory is renamed from old name to new name) - const newWorkspacePath = newMetadataResult.namedWorkspacePath; - expect(newWorkspacePath).not.toBe(oldWorkspacePath); - expect(newWorkspacePath).toContain(newName); // New path includes new name - - // Verify config was updated with new path - const config = env.config.loadConfigOrDefault(); - let foundWorkspace = false; - for (const [, projectConfig] of config.projects.entries()) { - const workspace = projectConfig.workspaces.find((w) => w.path === newWorkspacePath); - if (workspace) { - foundWorkspace = true; - expect(workspace.name).toBe(newName); // Name updated in config - expect(workspace.id).toBe(workspaceId); // ID unchanged - break; - } - } - expect(foundWorkspace).toBe(true); - - // Verify metadata event was emitted (update existing workspace) - const metadataEvents = env.sentEvents.filter( - (e) => e.channel === IPC_CHANNELS.WORKSPACE_METADATA - ); - expect(metadataEvents.length).toBe(1); - // Event should be update of existing workspace - expect(metadataEvents[0].data).toMatchObject({ - workspaceId, - metadata: expect.objectContaining({ - id: workspaceId, - name: newName, - projectName, - }), - }); - } finally { - await cleanup(); - } - }, - 30000 // Increased timeout to debug hanging test - ); - - 
test.concurrent( - "should fail to rename if new name conflicts with existing workspace", - async () => { - const { env, workspaceId, tempGitRepo, cleanup } = await setupWorkspace("anthropic"); - try { - // Create a second workspace with a different branch - const secondBranchName = "conflict-branch"; - const createResult = await createWorkspace( - env.mockIpcRenderer, - tempGitRepo, - secondBranchName - ); - expect(createResult.success).toBe(true); - - // Try to rename first workspace to the second workspace's name - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - secondBranchName - ); - expect(renameResult.success).toBe(false); - expect(renameResult.error).toContain("already exists"); - - // Verify original workspace still exists and wasn't modified - const metadataResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_GET_INFO, - workspaceId - ); - expect(metadataResult).toBeTruthy(); - expect(metadataResult.id).toBe(workspaceId); - } finally { - await cleanup(); - } - }, - 15000 +// SSH server config (shared across all SSH tests) +let sshConfig: SSHServerConfig | undefined; + +// ============================================================================ +// Test Helpers +// ============================================================================ + +/** + * Create workspace and handle cleanup on test failure + */ +async function createWorkspaceWithCleanup( + env: TestEnvironment, + projectPath: string, + branchName: string, + trunkBranch: string, + runtimeConfig?: RuntimeConfig +): Promise<{ + result: + | { success: true; metadata: FrontendWorkspaceMetadata } + | { success: false; error: string }; + cleanup: () => Promise; +}> { + const result = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_CREATE, + projectPath, + branchName, + trunkBranch, + runtimeConfig ); - test.concurrent( - "should succeed when renaming workspace to itself (no-op)", - async () => { - const { env, 
workspaceId, workspacePath, tempGitRepo, branchName, cleanup } = - await setupWorkspace("anthropic"); - try { - // Add project and workspace to config via IPC - await env.mockIpcRenderer.invoke(IPC_CHANNELS.PROJECT_CREATE, tempGitRepo); - // Manually add workspace to the project (normally done by WORKSPACE_CREATE) - const projectsConfig = env.config.loadConfigOrDefault(); - const projectConfig = projectsConfig.projects.get(tempGitRepo); - if (projectConfig) { - projectConfig.workspaces.push({ - path: workspacePath, - id: workspaceId, - name: branchName, - }); - env.config.saveConfig(projectsConfig); - } - - // Get current metadata - const oldMetadata = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_GET_INFO, - workspaceId - ); - expect(oldMetadata).toBeTruthy(); - - // Rename workspace to its current name - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - branchName - ); - expect(renameResult.success).toBe(true); - expect(renameResult.data.newWorkspaceId).toBe(workspaceId); - - // Verify metadata unchanged - const newMetadata = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_GET_INFO, - workspaceId - ); - expect(newMetadata).toBeTruthy(); - expect(newMetadata.id).toBe(workspaceId); - expect(newMetadata.namedWorkspacePath).toBe(oldMetadata.namedWorkspacePath); - } finally { - await cleanup(); - } - }, - 15000 - ); + const cleanup = async () => { + if (result.success) { + await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, result.metadata.id); + } + }; - test.concurrent( - "should fail to rename if workspace doesn't exist", - async () => { - const { env, cleanup } = await setupWorkspace("anthropic"); - try { - const nonExistentWorkspaceId = "nonexistent-workspace"; - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - nonExistentWorkspaceId, - "new-name" - ); - expect(renameResult.success).toBe(false); - 
expect(renameResult.error).toContain("metadata"); - } finally { - await cleanup(); - } - }, - 15000 - ); - - test.concurrent( - "should fail to rename with invalid workspace name", - async () => { - const { env, workspaceId, cleanup } = await setupWorkspace("anthropic"); - try { - // Test various invalid names - const invalidNames = [ - { name: "", expectedError: "empty" }, - { name: "My-Branch", expectedError: "lowercase" }, - { name: "branch name", expectedError: "lowercase" }, - { name: "branch@123", expectedError: "lowercase" }, - { name: "branch/test", expectedError: "lowercase" }, - { name: "a".repeat(65), expectedError: "64 characters" }, - ]; - - for (const { name, expectedError } of invalidNames) { - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - name - ); - expect(renameResult.success).toBe(false); - expect(renameResult.error.toLowerCase()).toContain(expectedError.toLowerCase()); - } - - // Verify original workspace still exists and wasn't modified - const metadataResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_GET_INFO, - workspaceId - ); - expect(metadataResult).toBeTruthy(); - expect(metadataResult.id).toBe(workspaceId); - } finally { - await cleanup(); - } - }, - 15000 - ); - - test.concurrent( - "should preserve chat history after rename", - async () => { - const { env, workspaceId, workspacePath, tempGitRepo, branchName, cleanup } = - await setupWorkspace("anthropic"); - try { - // Add project and workspace to config via IPC - await env.mockIpcRenderer.invoke(IPC_CHANNELS.PROJECT_CREATE, tempGitRepo); - // Manually add workspace to the project (normally done by WORKSPACE_CREATE) - const projectsConfig = env.config.loadConfigOrDefault(); - const projectConfig = projectsConfig.projects.get(tempGitRepo); - if (projectConfig) { - projectConfig.workspaces.push({ - path: workspacePath, - id: workspaceId, - name: branchName, - }); - env.config.saveConfig(projectsConfig); - } - // 
Send a message to create some history - env.sentEvents.length = 0; - const result = await sendMessageWithModel(env.mockIpcRenderer, workspaceId, "What is 2+2?"); - expect(result.success).toBe(true); - - // Wait for response - const collector = createEventCollector(env.sentEvents, workspaceId); - await collector.waitForEvent("stream-end", 10000); - - // Clear events before rename - env.sentEvents.length = 0; - - // Rename the workspace - const newName = "renamed-with-history"; - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - newName - ); - if (!renameResult.success) { - console.error("Rename failed:", renameResult.error); - } - expect(renameResult.success).toBe(true); - - // Get new workspace ID from result (don't construct it!) - const newWorkspaceId = renameResult.data.newWorkspaceId; - - // Verify chat history file was moved (with retry for timing) - const newSessionDir = env.config.getSessionDir(newWorkspaceId); - const chatHistoryPath = `${newSessionDir}/chat.jsonl`; - const chatHistoryExists = await waitForFileExists(chatHistoryPath); - expect(chatHistoryExists).toBe(true); - - // Verify we can read the history - const historyContent = await fs.readFile(chatHistoryPath, "utf-8"); - const lines = historyContent.trim().split("\n"); - expect(lines.length).toBeGreaterThan(0); - } finally { - await cleanup(); - } - }, - 30000 - ); - - test.concurrent( - "should support editing messages after rename", - async () => { - const { env, workspaceId, workspacePath, tempGitRepo, branchName, cleanup } = - await setupWorkspace("anthropic"); - try { - // Add project and workspace to config via IPC - await env.mockIpcRenderer.invoke(IPC_CHANNELS.PROJECT_CREATE, tempGitRepo); - // Manually add workspace to the project (normally done by WORKSPACE_CREATE) - const projectsConfig = env.config.loadConfigOrDefault(); - const projectConfig = projectsConfig.projects.get(tempGitRepo); - if (projectConfig) { - 
projectConfig.workspaces.push({ - path: workspacePath, - id: workspaceId, - name: branchName, - }); - env.config.saveConfig(projectsConfig); - } - - // Send a message to create history before rename - env.sentEvents.length = 0; - const result = await sendMessageWithModel( - env.mockIpcRenderer, - workspaceId, - "What is 2+2?", - "anthropic", - "claude-sonnet-4-5" - ); - expect(result.success).toBe(true); - - // Wait for response - const collector = createEventCollector(env.sentEvents, workspaceId); - await collector.waitForEvent("stream-end", 10000); - - // Get the user message from chat events for later editing - const chatMessages = env.sentEvents.filter( - (e) => - e.channel === `workspace:chat:${workspaceId}` && - typeof e.data === "object" && - e.data !== null && - "role" in e.data - ); - const userMessage = chatMessages.find((e) => (e.data as CmuxMessage).role === "user"); - expect(userMessage).toBeTruthy(); - const userMessageId = (userMessage!.data as CmuxMessage).id; - - // Clear events before rename - env.sentEvents.length = 0; - - // Rename the workspace - const newName = "renamed-edit-test"; - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - newName - ); - expect(renameResult.success).toBe(true); - - // Get new workspace ID from result - const newWorkspaceId = renameResult.data.newWorkspaceId; - - // Clear events before edit - env.sentEvents.length = 0; - - // Edit the user message using the new workspace ID - // This is the critical test - editing should work after rename - const editResult = await sendMessageWithModel( - env.mockIpcRenderer, - newWorkspaceId, - "What is 3+3?", - "anthropic", - "claude-sonnet-4-5", - { editMessageId: userMessageId } - ); - expect(editResult.success).toBe(true); - - // Wait for response - const editCollector = createEventCollector(env.sentEvents, newWorkspaceId); - const streamEnd = await editCollector.waitForEvent("stream-end", 10000); - 
expect(streamEnd).toBeTruthy(); - - // Verify we got the edited user message and a successful response - editCollector.collect(); - const allEvents = editCollector.getEvents(); - - const editedUserMessage = allEvents.find( - (e) => - "role" in e && e.role === "user" && e.parts?.some((p: any) => p.text?.includes("3+3")) - ); - expect(editedUserMessage).toBeTruthy(); - - // Verify stream completed successfully (proves AI responded to edited message) - expect(streamEnd).toBeDefined(); - } finally { - await cleanup(); - } - }, - 30000 - ); + return { result, cleanup }; +} - test.concurrent( - "should fail to rename if workspace is currently streaming", - async () => { - const { env, workspaceId, tempGitRepo, branchName, cleanup } = - await setupWorkspace("anthropic"); - try { - // Add project and workspace to config via IPC - await env.mockIpcRenderer.invoke(IPC_CHANNELS.PROJECT_CREATE, tempGitRepo); - const projectsConfig = env.config.loadConfigOrDefault(); - const projectConfig = projectsConfig.projects.get(tempGitRepo); - if (projectConfig) { - const workspacePath = env.config.getWorkspacePath(tempGitRepo, branchName); - projectConfig.workspaces.push({ - path: workspacePath, - id: workspaceId, - name: branchName, - }); - env.config.saveConfig(projectsConfig); +describeIntegration("WORKSPACE_RENAME with both runtimes", () => { + beforeAll(async () => { + // Check if Docker is available (required for SSH tests) + if (!(await isDockerAvailable())) { + throw new Error( + "Docker is required for SSH runtime tests. Please install Docker or skip tests by unsetting TEST_INTEGRATION." 
+ ); + } + + // Start SSH server (shared across all tests for speed) + console.log("Starting SSH server container for renameWorkspace tests..."); + sshConfig = await startSSHServer(); + console.log(`SSH server ready on port ${sshConfig.port}`); + }, 60000); // 60s timeout for Docker operations + + afterAll(async () => { + if (sshConfig) { + console.log("Stopping SSH server container..."); + await stopSSHServer(sshConfig); + } + }, 30000); + + // Test matrix: Run tests for both local and SSH runtimes + describe.each<{ type: "local" | "ssh" }>([{ type: "local" }, { type: "ssh" }])( + "Runtime: $type", + ({ type }) => { + // Helper to build runtime config + const getRuntimeConfig = (branchName: string): RuntimeConfig | undefined => { + if (type === "ssh" && sshConfig) { + return { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: sshConfig.workdir, + identityFile: sshConfig.privateKeyPath, + port: sshConfig.port, + }; } - - // Start a stream (don't await - we want it running) - sendMessageWithModel( - env.mockIpcRenderer, - workspaceId, - "What is 2+2?" // Simple query that should complete quickly - ); - - // Wait for stream to actually start - const collector = createEventCollector(env.sentEvents, workspaceId); - await collector.waitForEvent("stream-start", 5000); - - // Attempt to rename while streaming - should fail - const newName = "renamed-during-stream"; - const renameResult = await env.mockIpcRenderer.invoke( - IPC_CHANNELS.WORKSPACE_RENAME, - workspaceId, - newName - ); - - // Verify rename was blocked due to active stream - expect(renameResult.success).toBe(false); - expect(renameResult.error).toContain("stream is active"); - - // Wait for stream to complete - await collector.waitForEvent("stream-end", 10000); - } finally { - await cleanup(); - } - }, - 20000 + return undefined; // undefined = defaults to local + }; + + // Get runtime-specific init wait time (SSH needs more time for rsync) + const getInitWaitTime = () => (type === "ssh" ? 
SSH_INIT_WAIT_MS : INIT_HOOK_WAIT_MS); + + test.concurrent( + "should successfully rename workspace and update all paths", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("rename-test"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + // Create workspace + const { result, cleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + branchName, + trunkBranch, + runtimeConfig + ); + + expect(result.success).toBe(true); + if (!result.success) { + throw new Error(`Failed to create workspace: ${result.error}`); + } + + const workspaceId = result.metadata.id; + const oldWorkspacePath = result.metadata.namedWorkspacePath; + const oldSessionDir = env.config.getSessionDir(workspaceId); + + // Wait for init hook to complete before renaming + await waitForInitComplete(env, workspaceId, getInitWaitTime()); + + // Clear events before rename + env.sentEvents.length = 0; + + // Rename the workspace + const newName = "renamed-branch"; + const renameResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_RENAME, + workspaceId, + newName + ); + + if (!renameResult.success) { + console.error("Rename failed:", renameResult.error); + } + expect(renameResult.success).toBe(true); + + // Get new workspace ID from backend (NEVER construct it in frontend) + expect(renameResult.data?.newWorkspaceId).toBeDefined(); + const newWorkspaceId = renameResult.data.newWorkspaceId; + + // With stable IDs, workspace ID should NOT change during rename + expect(newWorkspaceId).toBe(workspaceId); + + // Session directory should still be the same (stable IDs don't move directories) + const sessionDir = env.config.getSessionDir(workspaceId); + expect(sessionDir).toBe(oldSessionDir); + + // Verify metadata was updated (name changed, path changed, but ID stays the same) + const newMetadataResult = await 
env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_GET_INFO, + workspaceId // Use same workspace ID + ); + expect(newMetadataResult).toBeTruthy(); + expect(newMetadataResult.id).toBe(workspaceId); // ID unchanged + expect(newMetadataResult.name).toBe(newName); // Name updated + + // Path DOES change (directory is renamed from old name to new name) + const newWorkspacePath = newMetadataResult.namedWorkspacePath; + expect(newWorkspacePath).not.toBe(oldWorkspacePath); + expect(newWorkspacePath).toContain(newName); // New path includes new name + + // Verify config was updated with new path + const config = env.config.loadConfigOrDefault(); + let foundWorkspace = false; + for (const [, projectConfig] of config.projects.entries()) { + const workspace = projectConfig.workspaces.find((w) => w.path === newWorkspacePath); + if (workspace) { + foundWorkspace = true; + expect(workspace.name).toBe(newName); // Name updated in config + expect(workspace.id).toBe(workspaceId); // ID unchanged + break; + } + } + expect(foundWorkspace).toBe(true); + + // Verify metadata event was emitted (update existing workspace) + const metadataEvents = env.sentEvents.filter( + (e) => e.channel === IPC_CHANNELS.WORKSPACE_METADATA + ); + expect(metadataEvents.length).toBe(1); + + await cleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + + test.concurrent( + "should fail to rename if new name conflicts with existing workspace", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + const branchName = generateBranchName("first"); + const secondBranchName = generateBranchName("second"); + const trunkBranch = await detectDefaultTrunkBranch(tempGitRepo); + const runtimeConfig = getRuntimeConfig(branchName); + + // Create first workspace + const { result: firstResult, cleanup: firstCleanup } = await createWorkspaceWithCleanup( + env, + tempGitRepo, + 
branchName, + trunkBranch, + runtimeConfig + ); + expect(firstResult.success).toBe(true); + if (!firstResult.success) { + throw new Error(`Failed to create first workspace: ${firstResult.error}`); + } + + // Create second workspace + const { result: secondResult, cleanup: secondCleanup } = + await createWorkspaceWithCleanup( + env, + tempGitRepo, + secondBranchName, + trunkBranch, + runtimeConfig + ); + expect(secondResult.success).toBe(true); + if (!secondResult.success) { + throw new Error(`Failed to create second workspace: ${secondResult.error}`); + } + + // Try to rename first workspace to the second workspace's name + const renameResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_RENAME, + firstResult.metadata.id, + secondBranchName + ); + expect(renameResult.success).toBe(false); + expect(renameResult.error).toContain("already exists"); + + // Verify original workspace still exists and wasn't modified + const metadataResult = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_GET_INFO, + firstResult.metadata.id + ); + expect(metadataResult).toBeTruthy(); + expect(metadataResult.id).toBe(firstResult.metadata.id); + + await firstCleanup(); + await secondCleanup(); + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + TEST_TIMEOUT_MS + ); + } ); }); diff --git a/tests/ipcMain/resumeStream.test.ts b/tests/ipcMain/resumeStream.test.ts index 56e99101f5..fe693a8939 100644 --- a/tests/ipcMain/resumeStream.test.ts +++ b/tests/ipcMain/resumeStream.test.ts @@ -25,6 +25,13 @@ describeIntegration("IpcMain resumeStream integration tests", () => { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 
30000); // 30s timeout for tokenizer loading + test.concurrent( "should resume interrupted stream without new user message", async () => { diff --git a/tests/ipcMain/runtimeExecuteBash.test.ts b/tests/ipcMain/runtimeExecuteBash.test.ts new file mode 100644 index 0000000000..5fe8df2d11 --- /dev/null +++ b/tests/ipcMain/runtimeExecuteBash.test.ts @@ -0,0 +1,268 @@ +/** + * Integration tests for bash execution across Local and SSH runtimes + * + * Tests bash tool using real IPC handlers on both LocalRuntime and SSHRuntime. + * + * Reuses test infrastructure from runtimeFileEditing.test.ts + */ + +import { + createTestEnvironment, + cleanupTestEnvironment, + shouldRunIntegrationTests, + validateApiKeys, + getApiKey, + setupProviders, +} from "./setup"; +import { IPC_CHANNELS } from "../../src/constants/ipc-constants"; +import { createTempGitRepo, cleanupTempGitRepo, generateBranchName } from "./helpers"; +import { + isDockerAvailable, + startSSHServer, + stopSSHServer, + type SSHServerConfig, +} from "../runtime/ssh-fixture"; +import type { RuntimeConfig } from "../../src/types/runtime"; +import type { ToolPolicy } from "../../src/utils/tools/toolPolicy"; +import { + createWorkspaceHelper, + sendMessageAndWait, + extractTextFromEvents, +} from "./test-helpers/runtimeTestHelpers"; + +// Test constants +const TEST_TIMEOUT_LOCAL_MS = 25000; +const TEST_TIMEOUT_SSH_MS = 45000; +const HAIKU_MODEL = "anthropic:claude-haiku-4-5"; + +// Tool policy: Only allow bash tool +const BASH_ONLY: ToolPolicy = [ + { regex_match: "bash", action: "enable" }, + { regex_match: "file_.*", action: "disable" }, +]; + +// Skip all tests if TEST_INTEGRATION is not set +const describeIntegration = shouldRunIntegrationTests() ? 
describe : describe.skip; + +// Validate API keys before running tests +if (shouldRunIntegrationTests()) { + validateApiKeys(["ANTHROPIC_API_KEY"]); +} + +// SSH server config (shared across all SSH tests) +let sshConfig: SSHServerConfig | undefined; + +describeIntegration("Runtime Bash Execution", () => { + beforeAll(async () => { + // Check if Docker is available (required for SSH tests) + if (!(await isDockerAvailable())) { + throw new Error( + "Docker is required for SSH runtime tests. Please install Docker or skip tests by unsetting TEST_INTEGRATION." + ); + } + + // Start SSH server (shared across all tests for speed) + console.log("Starting SSH server container for bash tests..."); + sshConfig = await startSSHServer(); + console.log(`SSH server ready on port ${sshConfig.port}`); + }, 60000); + + afterAll(async () => { + if (sshConfig) { + console.log("Stopping SSH server container..."); + await stopSSHServer(sshConfig); + } + }, 30000); + + // Test matrix: Run tests for both local and SSH runtimes + describe.each<{ type: "local" | "ssh" }>([{ type: "local" }, { type: "ssh" }])( + "Runtime: $type", + ({ type }) => { + // Helper to build runtime config + const getRuntimeConfig = (branchName: string): RuntimeConfig | undefined => { + if (type === "ssh" && sshConfig) { + return { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: `${sshConfig.workdir}/${branchName}`, + identityFile: sshConfig.privateKeyPath, + port: sshConfig.port, + }; + } + return undefined; // undefined = defaults to local + }; + + test.concurrent( + "should execute simple bash command", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Setup provider + await setupProviders(env.mockIpcRenderer, { + anthropic: { + apiKey: getApiKey("ANTHROPIC_API_KEY"), + }, + }); + + // Create workspace + const branchName = generateBranchName("bash-simple"); + const runtimeConfig = getRuntimeConfig(branchName); + const { 
workspaceId, cleanup } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + try { + // Ask AI to run a simple command + const events = await sendMessageAndWait( + env, + workspaceId, + 'Run the bash command "echo Hello World"', + HAIKU_MODEL, + BASH_ONLY + ); + + // Extract response text + const responseText = extractTextFromEvents(events); + + // Verify the command output appears in the response + expect(responseText.toLowerCase()).toContain("hello world"); + + // Verify bash tool was called + // Tool calls now emit tool-call-start and tool-call-end events (not tool-call-delta) + const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start"); + const bashCall = toolCallStarts.find((e: any) => e.toolName === "bash"); + expect(bashCall).toBeDefined(); + } finally { + await cleanup(); + } + } finally { + await cleanupTempGitRepo(tempGitRepo); + await cleanupTestEnvironment(env); + } + }, + type === "ssh" ? TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS + ); + + test.concurrent( + "should handle bash command with environment variables", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Setup provider + await setupProviders(env.mockIpcRenderer, { + anthropic: { + apiKey: getApiKey("ANTHROPIC_API_KEY"), + }, + }); + + // Create workspace + const branchName = generateBranchName("bash-env"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, cleanup } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + try { + // Ask AI to run command that sets and uses env var + const events = await sendMessageAndWait( + env, + workspaceId, + 'Run bash command: export TEST_VAR="test123" && echo "Value: $TEST_VAR"', + HAIKU_MODEL, + BASH_ONLY + ); + + // Extract response text + const responseText = extractTextFromEvents(events); + + // Verify the env var value 
appears + expect(responseText).toContain("test123"); + + // Verify bash tool was called + // Tool calls now emit tool-call-start and tool-call-end events (not tool-call-delta) + const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start"); + const bashCall = toolCallStarts.find((e: any) => e.toolName === "bash"); + expect(bashCall).toBeDefined(); + } finally { + await cleanup(); + } + } finally { + await cleanupTempGitRepo(tempGitRepo); + await cleanupTestEnvironment(env); + } + }, + type === "ssh" ? TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS + ); + + test.concurrent( + "should handle bash command with special characters", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Setup provider + await setupProviders(env.mockIpcRenderer, { + anthropic: { + apiKey: getApiKey("ANTHROPIC_API_KEY"), + }, + }); + + // Create workspace + const branchName = generateBranchName("bash-special"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, cleanup } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + try { + // Ask AI to run command with special chars + const events = await sendMessageAndWait( + env, + workspaceId, + 'Run bash: echo "Test with $dollar and \\"quotes\\" and `backticks`"', + HAIKU_MODEL, + BASH_ONLY + ); + + // Extract response text + const responseText = extractTextFromEvents(events); + + // Verify special chars were handled correctly + expect(responseText).toContain("dollar"); + expect(responseText).toContain("quotes"); + + // Verify bash tool was called + // Tool calls now emit tool-call-start and tool-call-end events (not tool-call-delta) + const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start"); + const bashCall = toolCallStarts.find((e: any) => e.toolName === "bash"); + expect(bashCall).toBeDefined(); + } finally { + await cleanup(); + } + } finally { + await 
cleanupTempGitRepo(tempGitRepo); + await cleanupTestEnvironment(env); + } + }, + type === "ssh" ? TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS + ); + } + ); +}); diff --git a/tests/ipcMain/runtimeFileEditing.test.ts b/tests/ipcMain/runtimeFileEditing.test.ts new file mode 100644 index 0000000000..c93dacdd0f --- /dev/null +++ b/tests/ipcMain/runtimeFileEditing.test.ts @@ -0,0 +1,489 @@ +/** + * Integration tests for file editing tools across Local and SSH runtimes + * + * Tests file_read, file_edit_replace_string, and file_edit_insert tools + * using real IPC handlers on both LocalRuntime and SSHRuntime. + * + * Uses toolPolicy to restrict AI to only file tools (prevents bash circumvention). + */ + +import * as fs from "fs/promises"; +import * as path from "path"; +import { + createTestEnvironment, + cleanupTestEnvironment, + shouldRunIntegrationTests, + validateApiKeys, + getApiKey, + setupProviders, + preloadTestModules, + type TestEnvironment, +} from "./setup"; +import { IPC_CHANNELS, getChatChannel } from "../../src/constants/ipc-constants"; +import { createTempGitRepo, cleanupTempGitRepo, generateBranchName } from "./helpers"; +import { detectDefaultTrunkBranch } from "../../src/git"; +import { + isDockerAvailable, + startSSHServer, + stopSSHServer, + type SSHServerConfig, +} from "../runtime/ssh-fixture"; +import type { RuntimeConfig } from "../../src/types/runtime"; +import type { FrontendWorkspaceMetadata } from "../../src/types/workspace"; +import type { WorkspaceChatMessage } from "../../src/types/ipc"; +import type { ToolPolicy } from "../../src/utils/tools/toolPolicy"; + +// Test constants +const TEST_TIMEOUT_LOCAL_MS = 25000; // Includes init wait time +const TEST_TIMEOUT_SSH_MS = 60000; // SSH has more overhead (network, rsync, etc.) 
+const STREAM_TIMEOUT_LOCAL_MS = 15000; // Stream timeout for local runtime +const STREAM_TIMEOUT_SSH_MS = 25000; // SSH needs longer due to network latency +const HAIKU_MODEL = "anthropic:claude-haiku-4-5"; +const INIT_HOOK_WAIT_MS = 1500; // Wait for async init hook completion (local runtime) +const SSH_INIT_WAIT_MS = 7000; // SSH init includes sync + checkout + hook, takes longer + +// Tool policy: Only allow file tools (disable bash to isolate file tool issues) +const FILE_TOOLS_ONLY: ToolPolicy = [ + { regex_match: "file_.*", action: "enable" }, + { regex_match: "bash", action: "disable" }, +]; + +// Skip all tests if TEST_INTEGRATION is not set +const describeIntegration = shouldRunIntegrationTests() ? describe : describe.skip; + +// Validate API keys before running tests +if (shouldRunIntegrationTests()) { + validateApiKeys(["ANTHROPIC_API_KEY"]); +} + +// SSH server config (shared across all SSH tests) +let sshConfig: SSHServerConfig | undefined; + +// ============================================================================ +// Test Helpers +// ============================================================================ + +/** + * Wait for a specific event type to appear in the stream + */ +async function waitForEvent( + sentEvents: Array<{ channel: string; data: unknown }>, + workspaceId: string, + eventType: string, + timeoutMs: number +): Promise { + const startTime = Date.now(); + const chatChannel = getChatChannel(workspaceId); + let pollInterval = 50; + + while (Date.now() - startTime < timeoutMs) { + const events = sentEvents + .filter((e) => e.channel === chatChannel) + .map((e) => e.data as WorkspaceChatMessage); + + // Check if the event has appeared + const targetEvent = events.find((e) => "type" in e && e.type === eventType); + if (targetEvent) { + return events; + } + + await new Promise((resolve) => setTimeout(resolve, pollInterval)); + pollInterval = Math.min(pollInterval * 1.5, 500); + } + + throw new Error(`Event ${eventType} did not 
appear within ${timeoutMs}ms`); +} + +/** + * Wait for stream to complete and collect all events + */ +async function waitForStreamCompletion( + sentEvents: Array<{ channel: string; data: unknown }>, + workspaceId: string, + timeoutMs = 15000 // Reduced for simple operations with fast model +): Promise { + return waitForEvent(sentEvents, workspaceId, "stream-end", timeoutMs); +} + +/** + * Extract text content from stream events + */ +function extractTextFromEvents(events: WorkspaceChatMessage[]): string { + return events + .filter((e) => "type" in e && e.type === "stream-delta" && "delta" in e) + .map((e: any) => e.delta || "") + .join(""); +} + +/** + * Create workspace helper and wait for init hook to complete + */ +async function createWorkspaceHelper( + env: TestEnvironment, + projectPath: string, + branchName: string, + runtimeConfig?: RuntimeConfig, + isSSH: boolean = false +): Promise<{ + workspaceId: string; + cleanup: () => Promise; +}> { + const trunkBranch = await detectDefaultTrunkBranch(projectPath); + const result = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_CREATE, + projectPath, + branchName, + trunkBranch, + runtimeConfig + ); + + if (!result.success) { + throw new Error(`Failed to create workspace: ${result.error}`); + } + + const workspaceId = result.metadata.id; + + // Wait for init hook to complete by watching for init-end event + // This is critical - file operations will fail if init hasn't finished + const initTimeout = isSSH ? 
SSH_INIT_WAIT_MS : INIT_HOOK_WAIT_MS; + try { + await waitForEvent(env.sentEvents, workspaceId, "init-end", initTimeout); + } catch (err) { + // Init hook might not exist or might have already completed before we started waiting + // This is not necessarily an error - just log it + console.log( + `Note: init-end event not detected within ${initTimeout}ms (may have completed early)` + ); + } + + const cleanup = async () => { + await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, workspaceId); + }; + + return { workspaceId, cleanup }; +} + +/** + * Send message and wait for completion + */ +async function sendMessageAndWait( + env: TestEnvironment, + workspaceId: string, + message: string, + streamTimeout?: number +): Promise { + // Clear previous events + env.sentEvents.length = 0; + + // Send message with Haiku model and file-tools-only policy + const result = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_SEND_MESSAGE, + workspaceId, + message, + { + model: HAIKU_MODEL, + toolPolicy: FILE_TOOLS_ONLY, + } + ); + + if (!result.success) { + throw new Error(`Failed to send message: ${result.error}`); + } + + // Wait for stream completion + return await waitForStreamCompletion(env.sentEvents, workspaceId, streamTimeout); +} + +// ============================================================================ +// Tests +// ============================================================================ + +describeIntegration("Runtime File Editing Tools", () => { + beforeAll(async () => { + // Preload AI SDK providers and tokenizers to avoid race conditions in concurrent tests + await preloadTestModules(); + + // Check if Docker is available (required for SSH tests) + if (!(await isDockerAvailable())) { + throw new Error( + "Docker is required for SSH runtime tests. Please install Docker or skip tests by unsetting TEST_INTEGRATION." 
+ ); + } + + // Start SSH server (shared across all tests for speed) + console.log("Starting SSH server container for file editing tests..."); + sshConfig = await startSSHServer(); + console.log(`SSH server ready on port ${sshConfig.port}`); + }, 60000); + + afterAll(async () => { + if (sshConfig) { + console.log("Stopping SSH server container..."); + await stopSSHServer(sshConfig); + } + }, 30000); + + // Test matrix: Run tests for both local and SSH runtimes + describe.each<{ type: "local" | "ssh" }>([{ type: "local" }, { type: "ssh" }])( + "Runtime: $type", + ({ type }) => { + // Helper to build runtime config + const getRuntimeConfig = (branchName: string): RuntimeConfig | undefined => { + if (type === "ssh" && sshConfig) { + return { + type: "ssh", + host: `testuser@localhost`, + srcBaseDir: `${sshConfig.workdir}/${branchName}`, + identityFile: sshConfig.privateKeyPath, + port: sshConfig.port, + }; + } + return undefined; // undefined = defaults to local + }; + + test.concurrent( + "should read file content with file_read tool", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Setup provider + await setupProviders(env.mockIpcRenderer, { + anthropic: { + apiKey: getApiKey("ANTHROPIC_API_KEY"), + }, + }); + + // Create workspace + const branchName = generateBranchName("read-test"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, cleanup } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + try { + // Ask AI to create a test file + const testFileName = "test_read.txt"; + const streamTimeout = + type === "ssh" ? 
STREAM_TIMEOUT_SSH_MS : STREAM_TIMEOUT_LOCAL_MS; + const createEvents = await sendMessageAndWait( + env, + workspaceId, + `Create a file called ${testFileName} with the content: "Hello from cmux file tools!"`, + streamTimeout + ); + + // Verify file was created successfully + const createStreamEnd = createEvents.find( + (e) => "type" in e && e.type === "stream-end" + ); + expect(createStreamEnd).toBeDefined(); + expect((createStreamEnd as any).error).toBeUndefined(); + + // Now ask AI to read the file + const readEvents = await sendMessageAndWait( + env, + workspaceId, + `Read the file ${testFileName} and tell me what it contains.`, + streamTimeout + ); + + // Verify stream completed successfully + const streamEnd = readEvents.find((e) => "type" in e && e.type === "stream-end"); + expect(streamEnd).toBeDefined(); + expect((streamEnd as any).error).toBeUndefined(); + + // Verify file_read tool was called + const toolCalls = readEvents.filter( + (e) => "type" in e && e.type === "tool-call-start" + ); + const fileReadCall = toolCalls.find((e: any) => e.toolName === "file_read"); + expect(fileReadCall).toBeDefined(); + + // Verify response mentions the content + const responseText = extractTextFromEvents(readEvents); + expect(responseText.toLowerCase()).toContain("hello"); + } finally { + await cleanup(); + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + type === "ssh" ? 
TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS + ); + + test.concurrent( + "should replace text with file_edit_replace_string tool", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Setup provider + await setupProviders(env.mockIpcRenderer, { + anthropic: { + apiKey: getApiKey("ANTHROPIC_API_KEY"), + }, + }); + + // Create workspace + const branchName = generateBranchName("replace-test"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, cleanup } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + try { + // Ask AI to create a test file + const testFileName = "test_replace.txt"; + const streamTimeout = + type === "ssh" ? STREAM_TIMEOUT_SSH_MS : STREAM_TIMEOUT_LOCAL_MS; + const createEvents = await sendMessageAndWait( + env, + workspaceId, + `Create a file called ${testFileName} with the content: "The quick brown fox jumps over the lazy dog."`, + streamTimeout + ); + + // Verify file was created successfully + const createStreamEnd = createEvents.find( + (e) => "type" in e && e.type === "stream-end" + ); + expect(createStreamEnd).toBeDefined(); + expect((createStreamEnd as any).error).toBeUndefined(); + + // Ask AI to replace text + const replaceEvents = await sendMessageAndWait( + env, + workspaceId, + `In ${testFileName}, replace "brown fox" with "red panda".`, + streamTimeout + ); + + // Verify stream completed successfully + const streamEnd = replaceEvents.find((e) => "type" in e && e.type === "stream-end"); + expect(streamEnd).toBeDefined(); + expect((streamEnd as any).error).toBeUndefined(); + + // Verify file_edit_replace_string tool was called + const toolCalls = replaceEvents.filter( + (e) => "type" in e && e.type === "tool-call-start" + ); + const replaceCall = toolCalls.find( + (e: any) => e.toolName === "file_edit_replace_string" + ); + expect(replaceCall).toBeDefined(); + + // Verify the 
replacement was successful (check for diff or success message) + const responseText = extractTextFromEvents(replaceEvents); + expect( + responseText.toLowerCase().includes("replace") || + responseText.toLowerCase().includes("changed") || + responseText.toLowerCase().includes("updated") + ).toBe(true); + } finally { + await cleanup(); + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + type === "ssh" ? TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS + ); + + test.concurrent( + "should insert text with file_edit_insert tool", + async () => { + const env = await createTestEnvironment(); + const tempGitRepo = await createTempGitRepo(); + + try { + // Setup provider + await setupProviders(env.mockIpcRenderer, { + anthropic: { + apiKey: getApiKey("ANTHROPIC_API_KEY"), + }, + }); + + // Create workspace + const branchName = generateBranchName("insert-test"); + const runtimeConfig = getRuntimeConfig(branchName); + const { workspaceId, cleanup } = await createWorkspaceHelper( + env, + tempGitRepo, + branchName, + runtimeConfig, + type === "ssh" + ); + + try { + // Ask AI to create a test file + const testFileName = "test_insert.txt"; + const streamTimeout = + type === "ssh" ? 
STREAM_TIMEOUT_SSH_MS : STREAM_TIMEOUT_LOCAL_MS; + const createEvents = await sendMessageAndWait( + env, + workspaceId, + `Create a file called ${testFileName} with two lines: "Line 1" and "Line 3".`, + streamTimeout + ); + + // Verify file was created successfully + const createStreamEnd = createEvents.find( + (e) => "type" in e && e.type === "stream-end" + ); + expect(createStreamEnd).toBeDefined(); + expect((createStreamEnd as any).error).toBeUndefined(); + + // Ask AI to insert text + const insertEvents = await sendMessageAndWait( + env, + workspaceId, + `In ${testFileName}, insert "Line 2" between Line 1 and Line 3.`, + streamTimeout + ); + + // Verify stream completed successfully + const streamEnd = insertEvents.find((e) => "type" in e && e.type === "stream-end"); + expect(streamEnd).toBeDefined(); + expect((streamEnd as any).error).toBeUndefined(); + + // Verify file_edit_insert tool was called + const toolCalls = insertEvents.filter( + (e) => "type" in e && e.type === "tool-call-start" + ); + const insertCall = toolCalls.find((e: any) => e.toolName === "file_edit_insert"); + expect(insertCall).toBeDefined(); + + // Verify the insertion was successful + const responseText = extractTextFromEvents(insertEvents); + expect( + responseText.toLowerCase().includes("insert") || + responseText.toLowerCase().includes("add") || + responseText.toLowerCase().includes("updated") + ).toBe(true); + } finally { + await cleanup(); + } + } finally { + await cleanupTestEnvironment(env); + await cleanupTempGitRepo(tempGitRepo); + } + }, + type === "ssh" ? 
TEST_TIMEOUT_SSH_MS : TEST_TIMEOUT_LOCAL_MS + ); + } + ); +}); diff --git a/tests/ipcMain/sendMessage.test.ts b/tests/ipcMain/sendMessage.test.ts index 5edd61b0c9..f852fbb288 100644 --- a/tests/ipcMain/sendMessage.test.ts +++ b/tests/ipcMain/sendMessage.test.ts @@ -48,6 +48,13 @@ describeIntegration("IpcMain sendMessage integration tests", () => { if (process.env.CI && typeof jest !== "undefined" && jest.retryTimes) { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 30000); // 30s timeout for tokenizer loading // Run tests for each provider concurrently describe.each(PROVIDER_CONFIGS)("%s:%s provider tests", (provider, model) => { test.concurrent( diff --git a/tests/ipcMain/setup.ts b/tests/ipcMain/setup.ts index c0cb3ba01e..20d7c44d3f 100644 --- a/tests/ipcMain/setup.ts +++ b/tests/ipcMain/setup.ts @@ -9,8 +9,6 @@ import { IpcMain } from "../../src/services/ipcMain"; import { IPC_CHANNELS } from "../../src/constants/ipc-constants"; import { generateBranchName, createWorkspace } from "./helpers"; import { shouldRunIntegrationTests, validateApiKeys, getApiKey } from "../testUtils"; -import { loadTokenizerModules } from "../../src/utils/main/tokenizer"; -import { preloadAISDKProviders } from "../../src/services/aiService"; export interface TestEnvironment { config: Config; @@ -134,6 +132,18 @@ export async function setupProviders( // Re-export test utilities for backwards compatibility export { shouldRunIntegrationTests, validateApiKeys, getApiKey }; +/** + * Preload modules that may be imported dynamically during concurrent tests. + * Call this in beforeAll hooks to prevent Jest sandbox race conditions. 
+ */ +export async function preloadTestModules(): Promise { + const [{ loadTokenizerModules }, { preloadAISDKProviders }] = await Promise.all([ + import("../../src/utils/main/tokenizer"), + import("../../src/services/aiService"), + ]); + await Promise.all([loadTokenizerModules(), preloadAISDKProviders()]); +} + /** * Setup a complete workspace with provider * Encapsulates: env creation, provider setup, workspace creation, event clearing @@ -151,14 +161,6 @@ export async function setupWorkspace( }> { const { createTempGitRepo, cleanupTempGitRepo } = await import("./helpers"); - // Preload tokenizer modules to ensure accurate token counts for API calls - // Without this, tests would use /4 approximation which can cause API errors - await loadTokenizerModules(); - - // Preload AI SDK providers to avoid race conditions with dynamic imports - // in concurrent test environments - await preloadAISDKProviders(); - // Create dedicated temp git repo for this test const tempGitRepo = await createTempGitRepo(); diff --git a/tests/ipcMain/streamErrorRecovery.test.ts b/tests/ipcMain/streamErrorRecovery.test.ts index 658704ff51..5b4e8e3ceb 100644 --- a/tests/ipcMain/streamErrorRecovery.test.ts +++ b/tests/ipcMain/streamErrorRecovery.test.ts @@ -220,6 +220,13 @@ describeIntegration("Stream Error Recovery (No Amnesia)", () => { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 30000); // 30s timeout for tokenizer loading + test.concurrent( "should preserve exact prefix and continue from exact point after stream error", async () => { diff --git a/tests/ipcMain/test-helpers/runtimeTestHelpers.ts b/tests/ipcMain/test-helpers/runtimeTestHelpers.ts new file mode 100644 index 
0000000000..ab2a68d458 --- /dev/null +++ b/tests/ipcMain/test-helpers/runtimeTestHelpers.ts @@ -0,0 +1,149 @@ +/** + * Shared test helpers for runtime integration tests + * + * These helpers are used across multiple test files (runtimeFileEditing, runtimeExecuteBash, etc.) + * to reduce code duplication and ensure consistent test patterns. + */ + +import { IPC_CHANNELS, getChatChannel } from "../../../src/constants/ipc-constants"; +import { detectDefaultTrunkBranch } from "../../../src/git"; +import type { TestEnvironment } from "../setup"; +import type { RuntimeConfig } from "../../../src/types/runtime"; +import type { WorkspaceChatMessage } from "../../../src/types/ipc"; +import type { ToolPolicy } from "../../../src/utils/tools/toolPolicy"; + +// Constants +const INIT_HOOK_WAIT_MS = 1500; // Wait for async init hook completion (local runtime) +const SSH_INIT_WAIT_MS = 7000; // SSH init includes sync + checkout + hook, takes longer + +/** + * Wait for a specific event type to appear in the stream + */ +async function waitForEvent( + sentEvents: Array<{ channel: string; data: unknown }>, + workspaceId: string, + eventType: string, + timeoutMs: number +): Promise { + const startTime = Date.now(); + const chatChannel = getChatChannel(workspaceId); + let pollInterval = 50; + + while (Date.now() - startTime < timeoutMs) { + const events = sentEvents + .filter((e) => e.channel === chatChannel) + .map((e) => e.data as WorkspaceChatMessage); + + // Check if the event has appeared + const targetEvent = events.find((e) => "type" in e && e.type === eventType); + if (targetEvent) { + return events; + } + + await new Promise((resolve) => setTimeout(resolve, pollInterval)); + pollInterval = Math.min(pollInterval * 1.5, 500); + } + + throw new Error(`Event ${eventType} did not appear within ${timeoutMs}ms`); +} + +/** + * Wait for stream to complete and collect all events + */ +async function waitForStreamCompletion( + sentEvents: Array<{ channel: string; data: unknown }>, + 
workspaceId: string, + timeoutMs = 20000 // Sufficient for most operations with fast models +): Promise { + return waitForEvent(sentEvents, workspaceId, "stream-end", timeoutMs); +} + +/** + * Create a workspace and wait for init hook completion + */ +export async function createWorkspaceHelper( + env: TestEnvironment, + repoPath: string, + branchName: string, + runtimeConfig: RuntimeConfig | undefined, + isSSH: boolean +): Promise<{ workspaceId: string; cleanup: () => Promise }> { + // Detect trunk branch + const trunkBranch = await detectDefaultTrunkBranch(repoPath); + + // Create workspace + const result: any = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_CREATE, + repoPath, + branchName, + trunkBranch, + runtimeConfig + ); + + if (!result.success) { + throw new Error(`Failed to create workspace: ${result.error}`); + } + + const workspaceId = result.metadata.id; + + // Wait for init hook to complete by watching for init-end event + // This is critical - file operations will fail if init hasn't finished + const initTimeout = isSSH ? 
SSH_INIT_WAIT_MS : INIT_HOOK_WAIT_MS; + try { + await waitForEvent(env.sentEvents, workspaceId, "init-end", initTimeout); + } catch (err) { + // Init hook might not exist or might have already completed before we started waiting + // This is not necessarily an error - just log it + console.log( + `Note: init-end event not detected within ${initTimeout}ms (may have completed early)` + ); + } + + const cleanup = async () => { + await env.mockIpcRenderer.invoke(IPC_CHANNELS.WORKSPACE_REMOVE, workspaceId); + }; + + return { workspaceId, cleanup }; +} + +/** + * Send message and wait for completion + */ +export async function sendMessageAndWait( + env: TestEnvironment, + workspaceId: string, + message: string, + model: string, + toolPolicy: ToolPolicy +): Promise { + // Clear previous events + env.sentEvents.length = 0; + + // Send message + const result = await env.mockIpcRenderer.invoke( + IPC_CHANNELS.WORKSPACE_SEND_MESSAGE, + workspaceId, + message, + { + model, + toolPolicy, + } + ); + + if (!result.success) { + throw new Error(`Failed to send message: ${result.error}`); + } + + // Wait for stream completion + return await waitForStreamCompletion(env.sentEvents, workspaceId); +} + +/** + * Extract text content from stream events + */ +export function extractTextFromEvents(events: WorkspaceChatMessage[]): string { + return events + .filter((e: any) => e.type === "stream-delta" && "delta" in e) + .map((e: any) => e.delta || "") + .join(""); +} diff --git a/tests/ipcMain/truncate.test.ts b/tests/ipcMain/truncate.test.ts index 312631c952..d60a837e16 100644 --- a/tests/ipcMain/truncate.test.ts +++ b/tests/ipcMain/truncate.test.ts @@ -24,6 +24,13 @@ describeIntegration("IpcMain truncate integration tests", () => { jest.retryTimes(3, { logErrorsBeforeRetry: true }); } + // Load tokenizer modules once before all tests (takes ~14s) + // This ensures accurate token counts for API calls without timing out individual tests + beforeAll(async () => { + const { 
loadTokenizerModules } = await import("../../src/utils/main/tokenizer"); + await loadTokenizerModules(); + }, 30000); // 30s timeout for tokenizer loading + test.concurrent( "should truncate 50% of chat history and verify context is updated", async () => { diff --git a/tests/ipcMain/workspaceInitHook.test.ts b/tests/ipcMain/workspaceInitHook.test.ts index 052bac5cbb..e23dd8e6ea 100644 --- a/tests/ipcMain/workspaceInitHook.test.ts +++ b/tests/ipcMain/workspaceInitHook.test.ts @@ -139,7 +139,8 @@ describeIntegration("IpcMain workspace init hook integration tests", () => { const startEvent = initEvents.find((e) => isInitStart(e)); expect(startEvent).toBeDefined(); if (startEvent && isInitStart(startEvent)) { - expect(startEvent.hookPath).toContain(".cmux/init"); + // Hook path should be the project path (where .cmux/init exists) + expect(startEvent.hookPath).toBeTruthy(); } // Should have output and error lines @@ -152,9 +153,13 @@ describeIntegration("IpcMain workspace init hook integration tests", () => { { type: "init-output" } >[]; - expect(outputEvents.length).toBe(2); - expect(outputEvents[0].line).toBe("Installing dependencies..."); - expect(outputEvents[1].line).toBe("Build complete!"); + // Should have workspace creation logs + hook output + expect(outputEvents.length).toBeGreaterThanOrEqual(2); + + // Verify hook output is present (may have workspace creation logs before it) + const outputLines = outputEvents.map((e) => e.line); + expect(outputLines).toContain("Installing dependencies..."); + expect(outputLines).toContain("Build complete!"); expect(errorEvents.length).toBe(1); expect(errorEvents[0].line).toBe("Warning: deprecated package"); @@ -287,13 +292,26 @@ describeIntegration("IpcMain workspace init hook integration tests", () => { // Wait a bit to ensure no events are emitted await new Promise((resolve) => setTimeout(resolve, 500)); - // Verify no init events were sent on chat channel + // Verify init events were sent (workspace creation logs even 
without hook) const initEvents = env.sentEvents .filter((e) => e.channel === getChatChannel(workspaceId)) .map((e) => e.data as WorkspaceChatMessage) .filter((msg) => isInitStart(msg) || isInitOutput(msg) || isInitEnd(msg)); - expect(initEvents.length).toBe(0); + // Should have init-start event (always emitted, even without hook) + const startEvent = initEvents.find((e) => isInitStart(e)); + expect(startEvent).toBeDefined(); + + // Should have workspace creation logs (e.g., "Creating git worktree...") + const outputEvents = initEvents.filter((e) => isInitOutput(e)); + expect(outputEvents.length).toBeGreaterThan(0); + + // Should have completion event with exit code 0 (success, no hook) + const endEvent = initEvents.find((e) => isInitEnd(e)); + expect(endEvent).toBeDefined(); + if (endEvent && isInitEnd(endEvent)) { + expect(endEvent.exitCode).toBe(0); + } // Workspace should still be usable const info = await env.mockIpcRenderer.invoke( @@ -344,11 +362,25 @@ describeIntegration("IpcMain workspace init hook integration tests", () => { const status = JSON.parse(statusContent); expect(status.status).toBe("success"); expect(status.exitCode).toBe(0); - expect(status.lines).toEqual([ - { line: "Installing dependencies", isError: false, timestamp: expect.any(Number) }, - { line: "Done!", isError: false, timestamp: expect.any(Number) }, - ]); - expect(status.hookPath).toContain(".cmux/init"); + + // Should include workspace creation logs + hook output + expect(status.lines).toEqual( + expect.arrayContaining([ + { line: "Creating git worktree...", isError: false, timestamp: expect.any(Number) }, + { + line: "Worktree created successfully", + isError: false, + timestamp: expect.any(Number), + }, + expect.objectContaining({ + line: expect.stringMatching(/Running init hook:/), + isError: false, + }), + { line: "Installing dependencies", isError: false, timestamp: expect.any(Number) }, + { line: "Done!", isError: false, timestamp: expect.any(Number) }, + ]) + ); + 
expect(status.hookPath).toBeTruthy(); // Project path where hook exists expect(status.startTime).toBeGreaterThan(0); expect(status.endTime).toBeGreaterThan(status.startTime); } finally { @@ -392,11 +424,14 @@ test.concurrent( .filter((e) => e.channel === getChatChannel(workspaceId)) .filter((e) => isInitOutput(e.data as WorkspaceChatMessage)); - initOutputEvents = currentEvents.map((e) => ({ + const allOutputEvents = currentEvents.map((e) => ({ timestamp: e.timestamp, // Use timestamp from when event was sent line: (e.data as { line: string }).line, })); + // Filter to only hook output lines (exclude workspace creation logs) + initOutputEvents = allOutputEvents.filter((e) => e.line.startsWith("Line ")); + if (initOutputEvents.length >= 4) break; await new Promise((resolve) => setTimeout(resolve, 50)); } diff --git a/tests/runtime/runtime.test.ts b/tests/runtime/runtime.test.ts new file mode 100644 index 0000000000..36e482bd7b --- /dev/null +++ b/tests/runtime/runtime.test.ts @@ -0,0 +1,1061 @@ +/** + * Runtime integration tests + * + * Tests both LocalRuntime and SSHRuntime against the same interface contract. + * SSH tests use a real Docker container (no mocking) for confidence. + */ + +// Jest globals are available automatically - no need to import +import * as path from "path"; +import { shouldRunIntegrationTests } from "../testUtils"; +import { + isDockerAvailable, + startSSHServer, + stopSSHServer, + type SSHServerConfig, +} from "./ssh-fixture"; +import { createTestRuntime, TestWorkspace, type RuntimeType } from "./test-helpers"; +import { execBuffered, readFileString, writeFileString } from "@/utils/runtime/helpers"; +import type { Runtime } from "@/runtime/Runtime"; +import { RuntimeError } from "@/runtime/Runtime"; + +// Skip all tests if TEST_INTEGRATION is not set +const describeIntegration = shouldRunIntegrationTests() ? 
describe : describe.skip; + +// SSH server config (shared across all tests) +let sshConfig: SSHServerConfig | undefined; + +describeIntegration("Runtime integration tests", () => { + beforeAll(async () => { + // Check if Docker is available (required for SSH tests) + if (!(await isDockerAvailable())) { + throw new Error( + "Docker is required for runtime integration tests. Please install Docker or skip tests by unsetting TEST_INTEGRATION." + ); + } + + // Start SSH server (shared across all tests for speed) + console.log("Starting SSH server container..."); + sshConfig = await startSSHServer(); + console.log(`SSH server ready on port ${sshConfig.port}`); + }, 60000); // 60s timeout for Docker operations + + afterAll(async () => { + if (sshConfig) { + console.log("Stopping SSH server container..."); + await stopSSHServer(sshConfig); + } + }, 30000); + + // Test matrix: Run all tests for both local and SSH runtimes + describe.each<{ type: RuntimeType }>([{ type: "local" }, { type: "ssh" }])( + "Runtime: $type", + ({ type }) => { + // Helper to create runtime for this test type + // Use a base working directory - TestWorkspace will create subdirectories as needed + const getBaseWorkdir = () => (type === "ssh" ? 
sshConfig!.workdir : "/tmp"); + const createRuntime = (): Runtime => createTestRuntime(type, getBaseWorkdir(), sshConfig); + + describe("exec() - Command execution", () => { + test.concurrent("captures stdout and stderr separately", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, 'echo "output" && echo "error" >&2', { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout.trim()).toBe("output"); + expect(result.stderr.trim()).toBe("error"); + expect(result.exitCode).toBe(0); + expect(result.duration).toBeGreaterThan(0); + }); + + test.concurrent("returns correct exit code for failed commands", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "exit 42", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.exitCode).toBe(42); + }); + + test.concurrent("handles stdin input", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "cat", { + cwd: workspace.path, + timeout: 30, + stdin: "hello from stdin", + }); + + expect(result.stdout).toBe("hello from stdin"); + expect(result.exitCode).toBe(0); + }); + + test.concurrent("passes environment variables", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, 'echo "$TEST_VAR"', { + cwd: workspace.path, + timeout: 30, + env: { TEST_VAR: "test-value" }, + }); + + expect(result.stdout.trim()).toBe("test-value"); + }); + + test.concurrent("handles empty output", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "true", { cwd: workspace.path, 
timeout: 30 }); + + expect(result.stdout).toBe(""); + expect(result.stderr).toBe(""); + expect(result.exitCode).toBe(0); + }); + + test.concurrent("handles commands with quotes and special characters", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, 'echo "hello \\"world\\""', { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout.trim()).toBe('hello "world"'); + }); + + test.concurrent("respects working directory", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "pwd", { cwd: workspace.path, timeout: 30 }); + + expect(result.stdout.trim()).toContain(workspace.path); + }); + }); + + describe("readFile() - File reading", () => { + test.concurrent("reads file contents", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Write test file + const testContent = "Hello, World!\nLine 2\nLine 3"; + await writeFileString(runtime, `${workspace.path}/test.txt`, testContent); + + // Read it back + const content = await readFileString(runtime, `${workspace.path}/test.txt`); + + expect(content).toBe(testContent); + }); + + test.concurrent("reads empty file", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Write empty file + await writeFileString(runtime, `${workspace.path}/empty.txt`, ""); + + // Read it back + const content = await readFileString(runtime, `${workspace.path}/empty.txt`); + + expect(content).toBe(""); + }); + + test.concurrent("reads binary data correctly", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Create binary file with specific bytes + const binaryData = new Uint8Array([0, 1, 2, 255, 
254, 253]); + const writer = runtime.writeFile(`${workspace.path}/binary.dat`).getWriter(); + await writer.write(binaryData); + await writer.close(); + + // Read it back + const stream = runtime.readFile(`${workspace.path}/binary.dat`); + const reader = stream.getReader(); + const chunks: Uint8Array[] = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + chunks.push(value); + } + + // Concatenate chunks + const readData = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0)); + let offset = 0; + for (const chunk of chunks) { + readData.set(chunk, offset); + offset += chunk.length; + } + + expect(readData).toEqual(binaryData); + }); + + test.concurrent("throws RuntimeError for non-existent file", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await expect( + readFileString(runtime, `${workspace.path}/does-not-exist.txt`) + ).rejects.toThrow(RuntimeError); + }); + + test.concurrent("throws RuntimeError when reading a directory", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Create subdirectory + await execBuffered(runtime, `mkdir -p subdir`, { cwd: workspace.path, timeout: 30 }); + + await expect(readFileString(runtime, `${workspace.path}/subdir`)).rejects.toThrow(); + }); + }); + + describe("writeFile() - File writing", () => { + test.concurrent("writes file contents", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const content = "Test content\nLine 2"; + await writeFileString(runtime, `${workspace.path}/output.txt`, content); + + // Verify by reading back + const result = await execBuffered(runtime, "cat output.txt", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout).toBe(content); + }); + + test.concurrent("overwrites existing file", async () => { + const runtime = 
createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const path = `${workspace.path}/overwrite.txt`; + + // Write initial content + await writeFileString(runtime, path, "original"); + + // Overwrite + await writeFileString(runtime, path, "new content"); + + // Verify + const content = await readFileString(runtime, path); + expect(content).toBe("new content"); + }); + + test.concurrent("writes empty file", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await writeFileString(runtime, `${workspace.path}/empty.txt`, ""); + + const content = await readFileString(runtime, `${workspace.path}/empty.txt`); + expect(content).toBe(""); + }); + + test.concurrent("writes binary data", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const binaryData = new Uint8Array([0, 1, 2, 255, 254, 253]); + const writer = runtime.writeFile(`${workspace.path}/binary.dat`).getWriter(); + await writer.write(binaryData); + await writer.close(); + + // Verify with wc -c (byte count) + const result = await execBuffered(runtime, "wc -c < binary.dat", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout.trim()).toBe("6"); + }); + + test.concurrent("creates parent directories if needed", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await writeFileString(runtime, `${workspace.path}/nested/dir/file.txt`, "content"); + + const content = await readFileString(runtime, `${workspace.path}/nested/dir/file.txt`); + expect(content).toBe("content"); + }); + + test.concurrent("handles special characters in content", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const specialContent = 'Special chars: \n\t"quotes"\'\r\n$VAR`cmd`'; + await writeFileString(runtime, 
`${workspace.path}/special.txt`, specialContent); + + const content = await readFileString(runtime, `${workspace.path}/special.txt`); + expect(content).toBe(specialContent); + }); + }); + + describe("stat() - File metadata", () => { + test.concurrent("returns file metadata", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const content = "Test content"; + await writeFileString(runtime, `${workspace.path}/test.txt`, content); + + const stat = await runtime.stat(`${workspace.path}/test.txt`); + + expect(stat.size).toBe(content.length); + expect(stat.isDirectory).toBe(false); + // Check modifiedTime is a valid date (use getTime() to avoid Jest Date issues) + expect(typeof stat.modifiedTime.getTime).toBe("function"); + expect(stat.modifiedTime.getTime()).toBeGreaterThan(0); + expect(stat.modifiedTime.getTime()).toBeLessThanOrEqual(Date.now()); + }); + + test.concurrent("returns directory metadata", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await execBuffered(runtime, "mkdir subdir", { cwd: workspace.path, timeout: 30 }); + + const stat = await runtime.stat(`${workspace.path}/subdir`); + + expect(stat.isDirectory).toBe(true); + }); + + test.concurrent("throws RuntimeError for non-existent path", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await expect(runtime.stat(`${workspace.path}/does-not-exist`)).rejects.toThrow( + RuntimeError + ); + }); + + test.concurrent("returns correct size for empty file", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await writeFileString(runtime, `${workspace.path}/empty.txt`, ""); + + const stat = await runtime.stat(`${workspace.path}/empty.txt`); + + expect(stat.size).toBe(0); + expect(stat.isDirectory).toBe(false); + }); + }); + + 
describe("Edge cases", () => { + test.concurrent( + "handles large files efficiently", + async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Create 1MB file + const largeContent = "x".repeat(1024 * 1024); + await writeFileString(runtime, `${workspace.path}/large.txt`, largeContent); + + const content = await readFileString(runtime, `${workspace.path}/large.txt`); + + expect(content.length).toBe(1024 * 1024); + expect(content).toBe(largeContent); + }, + 30000 + ); + + test.concurrent("handles concurrent operations", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Run multiple file operations concurrently + const operations = Array.from({ length: 10 }, async (_, i) => { + const path = `${workspace.path}/concurrent-${i}.txt`; + await writeFileString(runtime, path, `content-${i}`); + const content = await readFileString(runtime, path); + expect(content).toBe(`content-${i}`); + }); + + await Promise.all(operations); + }); + + test.concurrent("handles paths with spaces", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const path = `${workspace.path}/file with spaces.txt`; + await writeFileString(runtime, path, "content"); + + const content = await readFileString(runtime, path); + expect(content).toBe("content"); + }); + + test.concurrent("handles very long file paths", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Create nested directories + const longPath = `${workspace.path}/a/b/c/d/e/f/g/h/i/j/file.txt`; + await writeFileString(runtime, longPath, "nested"); + + const content = await readFileString(runtime, longPath); + expect(content).toBe("nested"); + }); + }); + + describe("Git operations", () => { + test.concurrent("can initialize a git repository", async () => { + 
const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Initialize git repo + const result = await execBuffered(runtime, "git init", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.exitCode).toBe(0); + + // Verify .git directory exists + const stat = await runtime.stat(`${workspace.path}/.git`); + expect(stat.isDirectory).toBe(true); + }); + + test.concurrent("can create commits", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Initialize git and configure user + await execBuffered( + runtime, + `git init && git config user.email "test@example.com" && git config user.name "Test User"`, + { cwd: workspace.path, timeout: 30 } + ); + + // Create a file and commit + await writeFileString(runtime, `${workspace.path}/test.txt`, "initial content"); + await execBuffered(runtime, `git add test.txt && git commit -m "Initial commit"`, { + cwd: workspace.path, + timeout: 30, + }); + + // Verify commit exists + const logResult = await execBuffered(runtime, "git log --oneline", { + cwd: workspace.path, + timeout: 30, + }); + + expect(logResult.stdout).toContain("Initial commit"); + }); + + test.concurrent("can create and checkout branches", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Setup git repo + await execBuffered( + runtime, + `git init && git config user.email "test@example.com" && git config user.name "Test"`, + { cwd: workspace.path, timeout: 30 } + ); + + // Create initial commit + await writeFileString(runtime, `${workspace.path}/file.txt`, "content"); + await execBuffered(runtime, `git add file.txt && git commit -m "init"`, { + cwd: workspace.path, + timeout: 30, + }); + + // Create and checkout new branch + await execBuffered(runtime, "git checkout -b feature-branch", { + cwd: workspace.path, + timeout: 30, + }); + + // Verify branch + const 
branchResult = await execBuffered(runtime, "git branch --show-current", { + cwd: workspace.path, + timeout: 30, + }); + + expect(branchResult.stdout.trim()).toBe("feature-branch"); + }); + + test.concurrent("can handle git status in dirty workspace", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Setup git repo with commit + await execBuffered( + runtime, + `git init && git config user.email "test@example.com" && git config user.name "Test"`, + { cwd: workspace.path, timeout: 30 } + ); + await writeFileString(runtime, `${workspace.path}/file.txt`, "original"); + await execBuffered(runtime, `git add file.txt && git commit -m "init"`, { + cwd: workspace.path, + timeout: 30, + }); + + // Make changes + await writeFileString(runtime, `${workspace.path}/file.txt`, "modified"); + + // Check status + const statusResult = await execBuffered(runtime, "git status --short", { + cwd: workspace.path, + timeout: 30, + }); + + expect(statusResult.stdout).toContain("M file.txt"); + }); + }); + + describe("Environment and shell behavior", () => { + test.concurrent("preserves multi-line output formatting", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, 'echo "line1\nline2\nline3"', { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout).toContain("line1"); + expect(result.stdout).toContain("line2"); + expect(result.stdout).toContain("line3"); + }); + + test.concurrent("handles commands with pipes", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + await writeFileString(runtime, `${workspace.path}/test.txt`, "line1\nline2\nline3"); + + const result = await execBuffered(runtime, "cat test.txt | grep line2", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout.trim()).toBe("line2"); + }); 
+ + test.concurrent("handles command substitution", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, 'echo "Current dir: $(basename $(pwd))"', { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.stdout).toContain("Current dir:"); + }); + + test.concurrent("handles large stdout output", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Generate large output (1000 lines) + const result = await execBuffered(runtime, "seq 1 1000", { + cwd: workspace.path, + timeout: 30, + }); + + const lines = result.stdout.trim().split("\n"); + expect(lines.length).toBe(1000); + expect(lines[0]).toBe("1"); + expect(lines[999]).toBe("1000"); + }); + + test.concurrent("handles commands that produce no output but take time", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "sleep 0.1", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.exitCode).toBe(0); + expect(result.stdout).toBe(""); + expect(result.duration).toBeGreaterThanOrEqual(100); + }); + }); + + describe("Error handling", () => { + test.concurrent("handles command not found", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "nonexistentcommand", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.exitCode).not.toBe(0); + expect(result.stderr.toLowerCase()).toContain("not found"); + }); + + test.concurrent("handles syntax errors in bash", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + const result = await execBuffered(runtime, "if true; then echo 'missing fi'", { + cwd: workspace.path, + timeout: 30, + 
}); + + expect(result.exitCode).not.toBe(0); + }); + + test.concurrent("handles permission denied errors", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Create file without execute permission and try to execute it + await writeFileString(runtime, `${workspace.path}/script.sh`, "#!/bin/sh\necho test"); + await execBuffered(runtime, "chmod 644 script.sh", { + cwd: workspace.path, + timeout: 30, + }); + + const result = await execBuffered(runtime, "./script.sh", { + cwd: workspace.path, + timeout: 30, + }); + + expect(result.exitCode).not.toBe(0); + expect(result.stderr.toLowerCase()).toContain("permission denied"); + }); + }); + + describe("renameWorkspace() - Workspace renaming", () => { + test.concurrent("successfully renames workspace and updates git worktree", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Initialize a git repository + await execBuffered(runtime, "git init", { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.email "test@example.com"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.name "Test User"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered( + runtime, + 'echo "test" > test.txt && git add test.txt && git commit -m "initial"', + { + cwd: workspace.path, + timeout: 30, + } + ); + + // Compute srcDir and paths - runtime uses srcDir/projectName/workspaceName pattern + const projectName = + type === "ssh" ? path.basename(workspace.path) : path.basename(workspace.path); + const srcDir = type === "ssh" ? "/home/testuser/workspace" : path.dirname(workspace.path); + const getWorkspacePath = (name: string) => { + return type === "ssh" + ? 
`/home/testuser/workspace/${projectName}/${name}` + : `${srcDir}/${projectName}/${name}`; + }; + + // Create workspace directory structure + // - Local: Use git worktree (managed by git) + // - SSH: Create plain directory (not a git worktree) + const worktree1Path = getWorkspacePath("worktree-1"); + if (type === "local") { + await execBuffered(runtime, `git worktree add -b feature-branch "${worktree1Path}"`, { + cwd: workspace.path, + timeout: 30, + }); + } else { + // SSH: Just create a directory (simulate workspace structure) + await execBuffered( + runtime, + `mkdir -p "${worktree1Path}" && echo "test" > "${worktree1Path}/test.txt"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + } + + // Rename the worktree using runtime.renameWorkspace + const result = await runtime.renameWorkspace( + workspace.path, + "worktree-1", + "worktree-renamed" + ); + + if (!result.success) { + console.error("Rename failed:", result.error); + } + expect(result.success).toBe(true); + if (result.success) { + expect(result.oldPath).toBe(worktree1Path); + expect(result.newPath).toBe(getWorkspacePath("worktree-renamed")); + + // Verify worktree was physically renamed + const oldPathCheck = await execBuffered( + runtime, + `test -d "${result.oldPath}" && echo "exists" || echo "missing"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + expect(oldPathCheck.stdout.trim()).toBe("missing"); + + const newPathCheck = await execBuffered( + runtime, + `test -d "${result.newPath}" && echo "exists" || echo "missing"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + expect(newPathCheck.stdout.trim()).toBe("exists"); + + // Verify contents were preserved + if (type === "local") { + // For local, verify git worktree list shows updated path + const worktreeList = await execBuffered(runtime, "git worktree list", { + cwd: workspace.path, + timeout: 30, + }); + expect(worktreeList.stdout).toContain(result.newPath); + expect(worktreeList.stdout).not.toContain(result.oldPath); + } else { + // 
For SSH, verify the file we created still exists + const fileCheck = await execBuffered( + runtime, + `test -f "${result.newPath}/test.txt" && echo "exists" || echo "missing"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + expect(fileCheck.stdout.trim()).toBe("exists"); + } + } + + // Cleanup + if (type === "local") { + // Remove git worktree before workspace cleanup + await execBuffered( + runtime, + `git worktree remove "${getWorkspacePath("worktree-renamed")}"`, + { + cwd: workspace.path, + timeout: 30, + } + ).catch(() => { + // Ignore errors during cleanup + }); + } else { + // Remove directory + await execBuffered(runtime, `rm -rf "${getWorkspacePath("worktree-renamed")}"`, { + cwd: workspace.path, + timeout: 30, + }).catch(() => { + // Ignore errors during cleanup + }); + } + }); + + test.concurrent("returns error when trying to rename non-existent worktree", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Initialize a git repository + await execBuffered(runtime, "git init", { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.email "test@example.com"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.name "Test User"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered( + runtime, + 'echo "test" > test.txt && git add test.txt && git commit -m "initial"', + { + cwd: workspace.path, + timeout: 30, + } + ); + + const projectName = path.basename(workspace.path); + const srcDir = type === "ssh" ? 
"/home/testuser/workspace" : path.dirname(workspace.path); + + // Try to rename a worktree that doesn't exist + const result = await runtime.renameWorkspace(workspace.path, "non-existent", "new-name"); + + expect(result.success).toBe(false); + if (!result.success) { + // Error message differs between local (git worktree) and SSH (mv command) + if (type === "local") { + expect(result.error).toContain("Failed to move worktree"); + } else { + expect(result.error).toContain("Failed to rename directory"); + } + } + }); + }); + + describe("deleteWorkspace() - Workspace deletion", () => { + test.concurrent("successfully deletes workspace and cleans up git worktree", async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Initialize a git repository + await execBuffered(runtime, "git init", { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.email "test@example.com"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.name "Test User"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered( + runtime, + 'echo "test" > test.txt && git add test.txt && git commit -m "initial"', + { + cwd: workspace.path, + timeout: 30, + } + ); + + // Compute srcDir and paths - runtime uses srcDir/projectName/workspaceName pattern + const projectName = + type === "ssh" ? path.basename(workspace.path) : path.basename(workspace.path); + const srcDir = type === "ssh" ? "/home/testuser/workspace" : path.dirname(workspace.path); + const getWorkspacePath = (name: string) => { + return type === "ssh" + ? 
`/home/testuser/workspace/${projectName}/${name}` + : `${srcDir}/${projectName}/${name}`; + }; + + // Create workspace directory structure + // - Local: Use git worktree (managed by git) + // - SSH: Create plain directory (not a git worktree) + const worktree1Path = getWorkspacePath("worktree-delete-test"); + if (type === "local") { + await execBuffered( + runtime, + `git worktree add -b delete-test-branch "${worktree1Path}"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + } else { + // SSH: Just create a directory (simulate workspace structure) + await execBuffered( + runtime, + `mkdir -p "${worktree1Path}" && echo "test" > "${worktree1Path}/test.txt"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + } + + // Verify workspace exists before deletion + const beforeCheck = await execBuffered( + runtime, + `test -d "${worktree1Path}" && echo "exists" || echo "missing"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + expect(beforeCheck.stdout.trim()).toBe("exists"); + + // Delete the worktree using runtime.deleteWorkspace + const result = await runtime.deleteWorkspace( + workspace.path, + "worktree-delete-test", + false // force=false + ); + + if (!result.success) { + console.error("Delete failed:", result.error); + } + expect(result.success).toBe(true); + if (result.success) { + expect(result.deletedPath).toBe(worktree1Path); + + // Verify workspace was physically deleted + const afterCheck = await execBuffered( + runtime, + `test -d "${result.deletedPath}" && echo "exists" || echo "missing"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + expect(afterCheck.stdout.trim()).toBe("missing"); + + // For local, verify git worktree list doesn't show the deleted worktree + if (type === "local") { + const worktreeList = await execBuffered(runtime, "git worktree list", { + cwd: workspace.path, + timeout: 30, + }); + expect(worktreeList.stdout).not.toContain(result.deletedPath); + } + } + }); + + test.concurrent( + "successfully force-deletes workspace 
with uncommitted changes (local only)", + async () => { + const runtime = createRuntime(); + await using workspace = await TestWorkspace.create(runtime, type); + + // Skip this test for SSH since force flag only matters for git worktrees + if (type === "ssh") { + return; + } + + // Initialize a git repository + await execBuffered(runtime, "git init", { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.email "test@example.com"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.name "Test User"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered( + runtime, + 'echo "test" > test.txt && git add test.txt && git commit -m "initial"', + { + cwd: workspace.path, + timeout: 30, + } + ); + + const projectName = path.basename(workspace.path); + const srcDir = path.dirname(workspace.path); + const worktreePath = `${srcDir}/${projectName}/worktree-dirty`; + + // Create worktree and add uncommitted changes + await execBuffered(runtime, `git worktree add -b dirty-branch "${worktreePath}"`, { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, `echo "uncommitted" > "${worktreePath}/dirty.txt"`, { + cwd: workspace.path, + timeout: 30, + }); + + // Force delete should succeed even with uncommitted changes + const result = await runtime.deleteWorkspace( + workspace.path, + "worktree-dirty", + true // force=true + ); + + expect(result.success).toBe(true); + if (result.success) { + expect(result.deletedPath).toBe(worktreePath); + + // Verify workspace was deleted + const afterCheck = await execBuffered( + runtime, + `test -d "${result.deletedPath}" && echo "exists" || echo "missing"`, + { + cwd: workspace.path, + timeout: 30, + } + ); + expect(afterCheck.stdout.trim()).toBe("missing"); + } + } + ); + + test.concurrent("returns error when trying to delete non-existent workspace", async () => { + const runtime = createRuntime(); + await using workspace = await 
TestWorkspace.create(runtime, type); + + // Initialize a git repository (needed for local worktree commands) + if (type === "local") { + await execBuffered(runtime, "git init", { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.email "test@example.com"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered(runtime, 'git config user.name "Test User"', { + cwd: workspace.path, + timeout: 30, + }); + await execBuffered( + runtime, + 'echo "test" > test.txt && git add test.txt && git commit -m "initial"', + { + cwd: workspace.path, + timeout: 30, + } + ); + } + + const projectName = path.basename(workspace.path); + const srcDir = type === "ssh" ? "/home/testuser/workspace" : path.dirname(workspace.path); + + // Try to delete a workspace that doesn't exist + const result = await runtime.deleteWorkspace(workspace.path, "non-existent", false); + + // For SSH with rm -rf, deleting non-existent directory succeeds (rm -rf is idempotent) + // For local git worktree, it should fail + if (type === "local") { + expect(result.success).toBe(false); + if (!result.success) { + expect(result.error).toContain("Failed to remove worktree"); + } + } else { + // SSH: rm -rf non-existent is a no-op (succeeds) + expect(result.success).toBe(true); + } + }); + }); + } + ); +}); diff --git a/tests/runtime/ssh-fixture.ts b/tests/runtime/ssh-fixture.ts new file mode 100644 index 0000000000..5505c3d661 --- /dev/null +++ b/tests/runtime/ssh-fixture.ts @@ -0,0 +1,278 @@ +/** + * Docker SSH server fixture for runtime integration tests + * + * Features: + * - Dynamic port allocation (no hardcoded ports) + * - Ephemeral SSH key generation per test run + * - Container lifecycle management + * - Isolated test runs on same machine + */ + +import * as crypto from "crypto"; +import * as fs from "fs/promises"; +import * as os from "os"; +import * as path from "path"; +import { spawn, type ChildProcess } from "child_process"; + +export interface 
SSHServerConfig { + /** Container ID */ + containerId: string; + /** Host to connect to (localhost:PORT) */ + host: string; + /** Port on host mapped to container's SSH port */ + port: number; + /** Path to private key file */ + privateKeyPath: string; + /** Path to public key file */ + publicKeyPath: string; + /** Working directory on remote host */ + workdir: string; + /** Temp directory for keys */ + tempDir: string; +} + +/** + * Check if Docker is available + */ +export async function isDockerAvailable(): Promise { + try { + await execCommand("docker", ["version"], { timeout: 5000 }); + return true; + } catch { + return false; + } +} + +/** + * Start SSH server in Docker container with dynamic port + */ +export async function startSSHServer(): Promise { + // Create temp directory for SSH keys + const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "cmux-ssh-test-")); + + try { + // Generate ephemeral SSH key pair + const privateKeyPath = path.join(tempDir, "id_rsa"); + const publicKeyPath = path.join(tempDir, "id_rsa.pub"); + + await execCommand("ssh-keygen", [ + "-t", + "rsa", + "-b", + "2048", + "-f", + privateKeyPath, + "-N", + "", // No passphrase + "-C", + "cmux-test", + ]); + + // Read public key + const publicKey = (await fs.readFile(publicKeyPath, "utf-8")).trim(); + + // Build Docker image (use context directory for COPY commands) + const dockerfilePath = path.join(__dirname, "ssh-server"); + await execCommand("docker", ["build", "-t", "cmux-ssh-test", dockerfilePath]); + + // Generate unique container name to avoid conflicts + const containerName = `cmux-ssh-test-${crypto.randomBytes(8).toString("hex")}`; + + // Start container with dynamic port mapping + // -p 0:22 tells Docker to assign a random available host port + const runResult = await execCommand("docker", [ + "run", + "-d", + "--name", + containerName, + "-p", + "0:22", // Dynamic port allocation + "-e", + `SSH_PUBLIC_KEY=${publicKey}`, + "--rm", // Auto-remove on stop + "cmux-ssh-test", + 
]); + + const containerId = runResult.stdout.trim(); + + // Wait for container to be ready + await waitForContainer(containerId); + + // Get the dynamically assigned port + const portResult = await execCommand("docker", ["port", containerId, "22"]); + + // Port output format: "0.0.0.0:XXXXX" or "[::]:XXXXX" + const portMatch = portResult.stdout.match(/:(\d+)/); + if (!portMatch) { + throw new Error(`Failed to parse port from: ${portResult.stdout}`); + } + const port = parseInt(portMatch[1], 10); + + // Wait for SSH to be ready + await waitForSSH("localhost", port, privateKeyPath); + + return { + containerId, + host: `localhost:${port}`, + port, + privateKeyPath, + publicKeyPath, + workdir: "/home/testuser/workspace", + tempDir, + }; + } catch (error) { + // Cleanup temp directory on failure + await fs.rm(tempDir, { recursive: true, force: true }); + throw error; + } +} + +/** + * Stop SSH server and cleanup + */ +export async function stopSSHServer(config: SSHServerConfig): Promise { + try { + // Stop container (--rm flag will auto-remove it) + await execCommand("docker", ["stop", config.containerId], { timeout: 10000 }); + } catch (error) { + console.error("Error stopping container:", error); + } + + try { + // Cleanup temp directory + await fs.rm(config.tempDir, { recursive: true, force: true }); + } catch (error) { + console.error("Error cleaning up temp directory:", error); + } +} + +/** + * Wait for container to be in running state + */ +async function waitForContainer(containerId: string, maxAttempts = 30): Promise { + for (let i = 0; i < maxAttempts; i++) { + try { + const result = await execCommand("docker", [ + "inspect", + "-f", + "{{.State.Running}}", + containerId, + ]); + + if (result.stdout.trim() === "true") { + return; + } + } catch { + // Container not ready yet + } + + await sleep(100); + } + + throw new Error(`Container ${containerId} did not start within timeout`); +} + +/** + * Wait for SSH to be ready by attempting to connect + */ +async 
function waitForSSH( + host: string, + port: number, + privateKeyPath: string, + maxAttempts = 30 +): Promise { + for (let i = 0; i < maxAttempts; i++) { + try { + await execCommand( + "ssh", + [ + "-i", + privateKeyPath, + "-o", + "StrictHostKeyChecking=no", + "-o", + "UserKnownHostsFile=/dev/null", + "-o", + "LogLevel=ERROR", + "-o", + "ConnectTimeout=1", + "-p", + port.toString(), + "testuser@localhost", + "echo ready", + ], + { timeout: 2000 } + ); + + // Success! + return; + } catch { + // SSH not ready yet + } + + await sleep(100); + } + + throw new Error(`SSH at ${host}:${port} did not become ready within timeout`); +} + +/** + * Execute command and return result + */ +function execCommand( + command: string, + args: string[], + options?: { timeout?: number } +): Promise<{ stdout: string; stderr: string; exitCode: number }> { + return new Promise((resolve, reject) => { + let stdout = ""; + let stderr = ""; + let timedOut = false; + + const child = spawn(command, args); + + const timeout = options?.timeout + ? setTimeout(() => { + timedOut = true; + child.kill(); + reject(new Error(`Command timed out: ${command} ${args.join(" ")}`)); + }, options.timeout) + : undefined; + + child.stdout.on("data", (data) => { + stdout += data.toString(); + }); + + child.stderr.on("data", (data) => { + stderr += data.toString(); + }); + + child.on("close", (code) => { + if (timeout) clearTimeout(timeout); + if (timedOut) return; + + if (code === 0) { + resolve({ stdout, stderr, exitCode: code ?? 
0 }); + } else { + reject( + new Error( + `Command failed with exit code ${code}: ${command} ${args.join(" ")}\nstderr: ${stderr}` + ) + ); + } + }); + + child.on("error", (error) => { + if (timeout) clearTimeout(timeout); + if (timedOut) return; + reject(error); + }); + }); +} + +/** + * Sleep for specified milliseconds + */ +function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/tests/runtime/ssh-server/Dockerfile b/tests/runtime/ssh-server/Dockerfile new file mode 100644 index 0000000000..f019acefc0 --- /dev/null +++ b/tests/runtime/ssh-server/Dockerfile @@ -0,0 +1,34 @@ +FROM alpine:latest + +# Install OpenSSH server, git, and bash +# bash is required for remote command execution (cmux forces bash to avoid shell compatibility issues) +RUN apk add --no-cache openssh-server git bash + +# Create test user +RUN adduser -D -s /bin/sh testuser && \ + echo "testuser:testuser" | chpasswd + +# Create .ssh directory for authorized_keys +RUN mkdir -p /home/testuser/.ssh && \ + chmod 700 /home/testuser/.ssh && \ + chown testuser:testuser /home/testuser/.ssh + +# Create working directory +RUN mkdir -p /home/testuser/workspace && \ + chown testuser:testuser /home/testuser/workspace + +# Setup SSH host keys +RUN ssh-keygen -A + +# Copy SSH config +COPY sshd_config /etc/ssh/sshd_config + +# Expose SSH port +EXPOSE 22 + +# Copy and set entrypoint +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + diff --git a/tests/runtime/ssh-server/entrypoint.sh b/tests/runtime/ssh-server/entrypoint.sh new file mode 100755 index 0000000000..360a7698ae --- /dev/null +++ b/tests/runtime/ssh-server/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/sh +set -e + +# The public key will be passed via environment variable or volume mount +if [ -n "$SSH_PUBLIC_KEY" ]; then + echo "$SSH_PUBLIC_KEY" > /home/testuser/.ssh/authorized_keys + chmod 600 /home/testuser/.ssh/authorized_keys + chown testuser:testuser 
/home/testuser/.ssh/authorized_keys +fi + +# Start SSH daemon in foreground +exec /usr/sbin/sshd -D -e + diff --git a/tests/runtime/ssh-server/sshd_config b/tests/runtime/ssh-server/sshd_config new file mode 100644 index 0000000000..8ba64883fa --- /dev/null +++ b/tests/runtime/ssh-server/sshd_config @@ -0,0 +1,31 @@ +# SSH daemon configuration for testing + +# Listen on all interfaces +ListenAddress 0.0.0.0 + +# Disable password authentication - key-based only +PasswordAuthentication no +PubkeyAuthentication yes +ChallengeResponseAuthentication no + +# Allow testuser only +AllowUsers testuser + +# Disable strict modes for test simplicity +StrictModes no + +# Logging +LogLevel INFO + +# Disable DNS lookups for faster connection +UseDNS no + +# Increase connection limits for concurrent test operations +# MaxStartups: start:rate:full (reject after 'start' unauthenticated connections) +MaxStartups 100:30:200 +# MaxSessions: max sessions per connection +MaxSessions 50 + +# Subsystems +Subsystem sftp /usr/lib/openssh/sftp-server + diff --git a/tests/runtime/test-helpers.ts b/tests/runtime/test-helpers.ts new file mode 100644 index 0000000000..3c54f096e2 --- /dev/null +++ b/tests/runtime/test-helpers.ts @@ -0,0 +1,182 @@ +/** + * Test helpers for runtime integration tests + */ + +import * as fs from "fs/promises"; +import * as os from "os"; +import * as path from "path"; +import type { Runtime } from "@/runtime/Runtime"; +import { LocalRuntime } from "@/runtime/LocalRuntime"; +import { SSHRuntime } from "@/runtime/SSHRuntime"; +import type { SSHServerConfig } from "./ssh-fixture"; + +/** + * Runtime type for test matrix + */ +export type RuntimeType = "local" | "ssh"; + +/** + * Create runtime instance based on type + */ +export function createTestRuntime( + type: RuntimeType, + workdir: string, + sshConfig?: SSHServerConfig +): Runtime { + switch (type) { + case "local": + return new LocalRuntime(workdir); + case "ssh": + if (!sshConfig) { + throw new Error("SSH config 
required for SSH runtime"); + } + return new SSHRuntime({ + host: `testuser@localhost`, + srcBaseDir: sshConfig.workdir, + identityFile: sshConfig.privateKeyPath, + port: sshConfig.port, + }); + } +} + +/** + * Test workspace - isolated temp directory for each test + */ +export class TestWorkspace { + public readonly path: string; + private readonly runtime: Runtime; + private readonly isRemote: boolean; + + private constructor(runtime: Runtime, workspacePath: string, isRemote: boolean) { + this.runtime = runtime; + this.path = workspacePath; + this.isRemote = isRemote; + } + + /** + * Create a test workspace with isolated directory + */ + static async create(runtime: Runtime, type: RuntimeType): Promise { + const isRemote = type === "ssh"; + + if (isRemote) { + // For SSH, create subdirectory in remote workdir + // The path is already set in SSHRuntime config + // Create a unique subdirectory + const testId = `test-${Date.now()}-${Math.random().toString(36).substring(7)}`; + const workspacePath = `/home/testuser/workspace/${testId}`; + + // Create directory on remote + const stream = await runtime.exec(`mkdir -p ${workspacePath}`, { + cwd: "/home/testuser", + timeout: 30, + }); + await stream.stdin.close(); + const exitCode = await stream.exitCode; + + if (exitCode !== 0) { + throw new Error(`Failed to create remote workspace: ${workspacePath}`); + } + + return new TestWorkspace(runtime, workspacePath, true); + } else { + // For local, use temp directory + const workspacePath = await fs.mkdtemp(path.join(os.tmpdir(), "runtime-test-")); + return new TestWorkspace(runtime, workspacePath, false); + } + } + + /** + * Cleanup workspace + */ + async cleanup(): Promise { + if (this.isRemote) { + // Remove remote directory + try { + const stream = await this.runtime.exec(`rm -rf ${this.path}`, { + cwd: "/home/testuser", + timeout: 60, + }); + await stream.stdin.close(); + await stream.exitCode; + } catch (error) { + console.error(`Failed to cleanup remote workspace 
${this.path}:`, error); + } + } else { + // Remove local directory + try { + await fs.rm(this.path, { recursive: true, force: true }); + } catch (error) { + console.error(`Failed to cleanup local workspace ${this.path}:`, error); + } + } + } + + /** + * Disposable interface for using declarations + */ + async [Symbol.asyncDispose](): Promise { + await this.cleanup(); + } +} + +/** + * Configure SSH client to use test key + * + * Returns environment variables to pass to SSH commands + */ +export function getSSHEnv(sshConfig: SSHServerConfig): Record { + // Create SSH config content + const sshConfigContent = ` +Host ${sshConfig.host} + HostName localhost + Port ${sshConfig.port} + User testuser + IdentityFile ${sshConfig.privateKeyPath} + StrictHostKeyChecking no + UserKnownHostsFile /dev/null + LogLevel ERROR +`; + + // For SSH commands, we need to write this to a temp file and use -F + // But for our SSHRuntime, we can configure ~/.ssh/config or use environment + // For now, we'll rely on ssh command finding the key via standard paths + + // Filter out undefined values from process.env + const env: Record = {}; + for (const [key, value] of Object.entries(process.env)) { + if (value !== undefined) { + env[key] = value; + } + } + + return env; +} + +/** + * Wait for predicate to become true + */ +export async function waitFor( + predicate: () => Promise, + options?: { timeout?: number; interval?: number } +): Promise { + const timeout = options?.timeout ?? 5000; + const interval = options?.interval ?? 
100; + const startTime = Date.now(); + + while (Date.now() - startTime < timeout) { + if (await predicate()) { + return; + } + await sleep(interval); + } + + throw new Error("Timeout waiting for predicate"); +} + +/** + * Sleep helper + */ +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/tsconfig.json b/tsconfig.json index c93ca6814d..b887b01f6d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,7 +1,7 @@ { "compilerOptions": { "target": "ES2020", - "lib": ["ES2020", "DOM"], + "lib": ["ES2023", "DOM"], "module": "ESNext", "moduleResolution": "node", "jsx": "react-jsx",