From ae108f090532765751c3996ba4c863a9fe858ddf Mon Sep 17 00:00:00 2001
From: Victor Berchet
Date: Wed, 21 Jan 2026 16:07:16 +0100
Subject: [PATCH 1/4] =?UTF-8?q?feat(unenv-preset):=20Remove=20the=20experi?=
 =?UTF-8?q?mental=20flag=20from=20=5Fstream=5Fwrap,=20=E2=80=A6=20(#12024)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Pete Bacon Darwin
---
 .changeset/hip-spies-wash.md                  |   7 +
 packages/unenv-preset/package.json            |   2 +-
 packages/unenv-preset/src/preset.ts           |  48 ++++---
 .../wrangler/e2e/unenv-preset/preset.test.ts  | 123 ++++++++++++------
 pnpm-lock.yaml                                |  62 +--------
 5 files changed, 122 insertions(+), 120 deletions(-)
 create mode 100644 .changeset/hip-spies-wash.md

diff --git a/.changeset/hip-spies-wash.md b/.changeset/hip-spies-wash.md
new file mode 100644
index 000000000000..3514993b539b
--- /dev/null
+++ b/.changeset/hip-spies-wash.md
@@ -0,0 +1,7 @@
+---
+"@cloudflare/unenv-preset": minor
+---
+
+Remove the experimental flag from `node:_stream_wrap`, `node:dgram`, `node:inspector`, and `node:sqlite`
+
+These modules are no longer experimental as of workerd 1.20260115.0
diff --git a/packages/unenv-preset/package.json b/packages/unenv-preset/package.json
index 83da1888fee2..ca030c14f609 100644
--- a/packages/unenv-preset/package.json
+++ b/packages/unenv-preset/package.json
@@ -49,7 +49,7 @@
 	},
 	"peerDependencies": {
 		"unenv": "2.0.0-rc.24",
-		"workerd": "^1.20251221.0"
+		"workerd": "^1.20260115.0"
 	},
 	"peerDependenciesMeta": {
 		"workerd": {
diff --git a/packages/unenv-preset/src/preset.ts b/packages/unenv-preset/src/preset.ts
index 38c10340fb86..dfe8a35927b8 100644
--- a/packages/unenv-preset/src/preset.ts
+++ b/packages/unenv-preset/src/preset.ts
@@ -613,11 +613,12 @@ function getVmOverrides({
  * Returns the overrides for `node:inspector` and `node:inspector/promises` (unenv or workerd)
  *
  * The native inspector implementation:
- * - is experimental and has no default enable date
+ * - is enabled starting from 2026-01-29
  * - can be enabled with the "enable_nodejs_inspector_module" flag
  * - can be disabled with the "disable_nodejs_inspector_module" flag
  */
 function getInspectorOverrides({
+	compatibilityDate,
 	compatibilityFlags,
 }: {
 	compatibilityDate: string;
@@ -627,11 +628,12 @@
 		"disable_nodejs_inspector_module"
 	);
 
-	const enabledByFlag =
-		compatibilityFlags.includes("enable_nodejs_inspector_module") &&
-		compatibilityFlags.includes("experimental");
+	const enabledByFlag = compatibilityFlags.includes(
+		"enable_nodejs_inspector_module"
+	);
+	const enabledByDate = compatibilityDate >= "2026-01-29";
 
-	const enabled = enabledByFlag && !disabledByFlag;
+	const enabled = (enabledByFlag || enabledByDate) && !disabledByFlag;
 
 	// When enabled, use the native `inspector` module from workerd
 	return enabled
@@ -649,11 +651,12 @@
  * Returns the overrides for `node:sqlite` (unenv or workerd)
  *
  * The native sqlite implementation:
- * - is experimental and has no default enable date
+ * - is enabled starting from 2026-01-29
  * - can be enabled with the "enable_nodejs_sqlite_module" flag
  * - can be disabled with the "disable_nodejs_sqlite_module" flag
  */
 function getSqliteOverrides({
+	compatibilityDate,
 	compatibilityFlags,
 }: {
 	compatibilityDate: string;
@@ -663,11 +666,12 @@
 		"disable_nodejs_sqlite_module"
 	);
 
-	const enabledByFlag =
-		compatibilityFlags.includes("enable_nodejs_sqlite_module") &&
-		compatibilityFlags.includes("experimental");
+	const 
enabledByFlag = compatibilityFlags.includes( + "enable_nodejs_sqlite_module" + ); + const enabledByDate = compatibilityDate >= "2026-01-29"; - const enabled = enabledByFlag && !disabledByFlag; + const enabled = (enabledByFlag || enabledByDate) && !disabledByFlag; // When enabled, use the native `sqlite` module from workerd return enabled @@ -685,11 +689,12 @@ function getSqliteOverrides({ * Returns the overrides for `node:dgram` (unenv or workerd) * * The native dgram implementation: - * - is experimental and has no default enable date + * - is enabled starting from 2026-01-29 * - can be enabled with the "enable_nodejs_dgram_module" flag * - can be disabled with the "disable_nodejs_dgram_module" flag */ function getDgramOverrides({ + compatibilityDate, compatibilityFlags, }: { compatibilityDate: string; @@ -699,11 +704,12 @@ function getDgramOverrides({ "disable_nodejs_dgram_module" ); - const enabledByFlag = - compatibilityFlags.includes("enable_nodejs_dgram_module") && - compatibilityFlags.includes("experimental"); + const enabledByFlag = compatibilityFlags.includes( + "enable_nodejs_dgram_module" + ); - const enabled = enabledByFlag && !disabledByFlag; + const enabledByDate = compatibilityDate >= "2026-01-29"; + const enabled = (enabledByFlag || enabledByDate) && !disabledByFlag; // When enabled, use the native `dgram` module from workerd return enabled @@ -721,11 +727,12 @@ function getDgramOverrides({ * Returns the overrides for `node:_stream_wrap` (unenv or workerd) * * The native _stream_wrap implementation: - * - is experimental and has no default enable date + * - is enabled starting from 2026-01-29 * - can be enabled with the "enable_nodejs_stream_wrap_module" flag * - can be disabled with the "disable_nodejs_stream_wrap_module" flag */ function getStreamWrapOverrides({ + compatibilityDate, compatibilityFlags, }: { compatibilityDate: string; @@ -735,11 +742,12 @@ function getStreamWrapOverrides({ "disable_nodejs_stream_wrap_module" ); - const enabledByFlag = - compatibilityFlags.includes("enable_nodejs_stream_wrap_module") && - compatibilityFlags.includes("experimental"); + const enabledByFlag = compatibilityFlags.includes( + "enable_nodejs_stream_wrap_module" + ); - const enabled = enabledByFlag && !disabledByFlag; + const enabledByDate = compatibilityDate >= "2026-01-29"; + const enabled = (enabledByFlag || enabledByDate) && !disabledByFlag; // When enabled, use the native `_stream_wrap` module from workerd return enabled diff --git a/packages/wrangler/e2e/unenv-preset/preset.test.ts b/packages/wrangler/e2e/unenv-preset/preset.test.ts index 4a8a4e3ae5f2..ed3bdc835352 100644 --- a/packages/wrangler/e2e/unenv-preset/preset.test.ts +++ b/packages/wrangler/e2e/unenv-preset/preset.test.ts @@ -466,92 +466,137 @@ const localTestConfigs: TestConfig[] = [ }, }, ], - // node:inspector and node:inspector/promises (experimental, no default enable date) + // node:inspector and node:inspector/promises [ - // TODO: add test for disabled by date (no date defined yet) - // TODO: add test for enabled by date (no date defined yet) + // { + // name: "inspector enabled by date", + // compatibilityDate: "2026-01-29", + // expectRuntimeFlags: { + // enable_nodejs_inspector_module: true, + // }, + // }, { - name: "inspector enabled by flag", + name: "inspector disabled by date", compatibilityDate: "2024-09-23", - compatibilityFlags: ["enable_nodejs_inspector_module", "experimental"], expectRuntimeFlags: { - enable_nodejs_inspector_module: true, + enable_nodejs_inspector_module: false, }, }, - // TODO: 
update the date past the default enable date (when defined) { - name: "inspector disabled by flag", + name: "inspector enabled by flag", compatibilityDate: "2024-09-23", - compatibilityFlags: ["disable_nodejs_inspector_module", "experimental"], + compatibilityFlags: ["enable_nodejs_inspector_module"], expectRuntimeFlags: { - enable_nodejs_inspector_module: false, + enable_nodejs_inspector_module: true, }, }, + // { + // name: "inspector disabled by flag", + // compatibilityDate: "2026-01-29", + // compatibilityFlags: ["disable_nodejs_inspector_module"], + // expectRuntimeFlags: { + // enable_nodejs_inspector_module: false, + // }, + // }, ], - // node:sqlite (experimental, no default enable date) + // node:sqlite [ - // TODO: add test for disabled by date (no date defined yet) - // TODO: add test for enabled by date (no date defined yet) + // { + // name: "sqlite enabled by date", + // compatibilityDate: "2026-01-29", + // expectRuntimeFlags: { + // enable_nodejs_sqlite_module: true, + // }, + // }, { - name: "sqlite enabled by flag", + name: "sqlite disabled by date", compatibilityDate: "2024-09-23", - compatibilityFlags: ["enable_nodejs_sqlite_module", "experimental"], expectRuntimeFlags: { - enable_nodejs_sqlite_module: true, + enable_nodejs_sqlite_module: false, }, }, - // TODO: update the date past the default enable date (when defined) { - name: "sqlite disabled by flag", + name: "sqlite enabled by flag", compatibilityDate: "2024-09-23", - compatibilityFlags: ["disable_nodejs_sqlite_module", "experimental"], + compatibilityFlags: ["enable_nodejs_sqlite_module"], expectRuntimeFlags: { - enable_nodejs_sqlite_module: false, + enable_nodejs_sqlite_module: true, }, }, + // { + // name: "sqlite disabled by flag", + // compatibilityDate: "2026-01-29", + // compatibilityFlags: ["disable_nodejs_sqlite_module"], + // expectRuntimeFlags: { + // enable_nodejs_sqlite_module: false, + // }, + // }, ], - // node:dgram (experimental, no default enable date) + // node:dgram [ - // TODO: add test for disabled by date (no date defined yet) - // TODO: add test for enabled by date (no date defined yet) + // { + // name: "dgram enabled by date", + // compatibilityDate: "2026-01-29", + // expectRuntimeFlags: { + // enable_nodejs_dgram_module: true, + // }, + // }, { - name: "dgram enabled by flag", + name: "dgram disabled by date", compatibilityDate: "2024-09-23", - compatibilityFlags: ["enable_nodejs_dgram_module", "experimental"], expectRuntimeFlags: { - enable_nodejs_dgram_module: true, + enable_nodejs_dgram_module: false, }, }, { - name: "dgram disabled by flag", + name: "dgram enabled by flag", compatibilityDate: "2024-09-23", - compatibilityFlags: ["disable_nodejs_dgram_module", "experimental"], + compatibilityFlags: ["enable_nodejs_dgram_module"], expectRuntimeFlags: { - enable_nodejs_dgram_module: false, + enable_nodejs_dgram_module: true, }, }, + // { + // name: "dgram disabled by flag", + // compatibilityDate: "2026-01-29", + // compatibilityFlags: ["disable_nodejs_dgram_module"], + // expectRuntimeFlags: { + // enable_nodejs_dgram_module: false, + // }, + // }, ], - // node:_stream_wrap (experimental, no default enable date) + // node:_stream_wrap [ - // TODO: add test for disabled by date (no date defined yet) - // TODO: add test for enabled by date (no date defined yet) + // { + // name: "_stream_wrap enabled by date", + // compatibilityDate: "2026-01-29", + // expectRuntimeFlags: { + // enable_nodejs_stream_wrap_module: true, + // }, + // }, { - name: "_stream_wrap enabled by flag", + name: 
"_stream_wrap disabled by date", compatibilityDate: "2024-09-23", - compatibilityFlags: ["enable_nodejs_stream_wrap_module", "experimental"], expectRuntimeFlags: { - enable_nodejs_stream_wrap_module: true, + enable_nodejs_stream_wrap_module: false, }, }, - // TODO: update the date past the default enable date (when defined) { - name: "_stream_wrap disabled by flag", + name: "_stream_wrap enabled by flag", compatibilityDate: "2024-09-23", - compatibilityFlags: ["disable_nodejs_stream_wrap_module", "experimental"], + compatibilityFlags: ["enable_nodejs_stream_wrap_module"], expectRuntimeFlags: { - enable_nodejs_stream_wrap_module: false, + enable_nodejs_stream_wrap_module: true, }, }, + // { + // name: "_stream_wrap disabled by flag", + // compatibilityDate: "2026-01-29", + // compatibilityFlags: ["disable_nodejs_stream_wrap_module"], + // expectRuntimeFlags: { + // enable_nodejs_stream_wrap_module: false, + // }, + // }, ], ].flat() as TestConfig[]; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 51b8d6490a04..13e2394b7d7b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2271,8 +2271,8 @@ importers: specifier: 2.0.0-rc.24 version: 2.0.0-rc.24 workerd: - specifier: ^1.20251221.0 - version: 1.20260111.0 + specifier: ^1.20260115.0 + version: 1.20260120.0 devDependencies: '@types/node-unenv': specifier: npm:@types/node@^22.14.0 @@ -4875,12 +4875,6 @@ packages: cpu: [x64] os: [darwin] - '@cloudflare/workerd-darwin-64@1.20260111.0': - resolution: {integrity: sha512-UGAjrGLev2/CMLZy7b+v1NIXA4Hupc/QJBFlJwMqldywMcJ/iEqvuUYYuVI2wZXuXeWkgmgFP87oFDQsg78YTQ==} - engines: {node: '>=16'} - cpu: [x64] - os: [darwin] - '@cloudflare/workerd-darwin-64@1.20260120.0': resolution: {integrity: sha512-JLHx3p5dpwz4wjVSis45YNReftttnI3ndhdMh5BUbbpdreN/g0jgxNt5Qp9tDFqEKl++N63qv+hxJiIIvSLR+Q==} engines: {node: '>=16'} @@ -4893,12 +4887,6 @@ packages: cpu: [arm64] os: [darwin] - '@cloudflare/workerd-darwin-arm64@1.20260111.0': - resolution: {integrity: sha512-YFAZwidLCQVa6rKCCaiWrhA+eh87a7MUhyd9lat3KSbLBAGpYM+ORpyTXpi2Gjm3j6Mp1e/wtzcFTSeMIy2UqA==} - engines: {node: '>=16'} - cpu: [arm64] - os: [darwin] - '@cloudflare/workerd-darwin-arm64@1.20260120.0': resolution: {integrity: sha512-1Md2tCRhZjwajsZNOiBeOVGiS3zbpLPzUDjHr4+XGTXWOA6FzzwScJwQZLa0Doc28Cp4Nr1n7xGL0Dwiz1XuOA==} engines: {node: '>=16'} @@ -4911,12 +4899,6 @@ packages: cpu: [x64] os: [linux] - '@cloudflare/workerd-linux-64@1.20260111.0': - resolution: {integrity: sha512-zx1GW6FwfOBjCV7QUCRzGRkViUtn3Is/zaaVPmm57xyy9sjtInx6/SdeBr2Y45tx9AnOP1CnaOFFdmH1P7VIEg==} - engines: {node: '>=16'} - cpu: [x64] - os: [linux] - '@cloudflare/workerd-linux-64@1.20260120.0': resolution: {integrity: sha512-O0mIfJfvU7F8N5siCoRDaVDuI12wkz2xlG4zK6/Ct7U9c9FiE0ViXNFWXFQm5PPj+qbkNRyhjUwhP+GCKTk5EQ==} engines: {node: '>=16'} @@ -4929,12 +4911,6 @@ packages: cpu: [arm64] os: [linux] - '@cloudflare/workerd-linux-arm64@1.20260111.0': - resolution: {integrity: sha512-wFVKxNvCyjRaAcgiSnJNJAmIos3p3Vv6Uhf4pFUZ9JIxr69GNlLWlm9SdCPvtwNFAjzSoDaKzDwjj5xqpuCS6Q==} - engines: {node: '>=16'} - cpu: [arm64] - os: [linux] - '@cloudflare/workerd-linux-arm64@1.20260120.0': resolution: {integrity: sha512-aRHO/7bjxVpjZEmVVcpmhbzpN6ITbFCxuLLZSW0H9O0C0w40cDCClWSi19T87Ax/PQcYjFNT22pTewKsupkckA==} engines: {node: '>=16'} @@ -4947,12 +4923,6 @@ packages: cpu: [x64] os: [win32] - '@cloudflare/workerd-windows-64@1.20260111.0': - resolution: {integrity: sha512-zWgd77L7OI1BxgBbG+2gybDahIMgPX5iNo6e3LqcEz1Xm3KfiqgnDyMBcxeQ7xDrj7fHUGAlc//QnKvDchuUoQ==} - engines: {node: '>=16'} - cpu: [x64] - os: [win32] - 
'@cloudflare/workerd-windows-64@1.20260120.0': resolution: {integrity: sha512-ASZIz1E8sqZQqQCgcfY1PJbBpUDrxPt8NZ+lqNil0qxnO4qX38hbCsdDF2/TDAuq0Txh7nu8ztgTelfNDlb4EA==} engines: {node: '>=16'} @@ -13970,11 +13940,6 @@ packages: engines: {node: '>=16'} hasBin: true - workerd@1.20260111.0: - resolution: {integrity: sha512-ov6Pt4k6d/ALfJja/EIHohT9IrY/f6GAa0arWEPat2qekp78xHbVM7jSxNWAMbaE7ZmnQQIFEGD1ZhAWZmQKIg==} - engines: {node: '>=16'} - hasBin: true - workerd@1.20260120.0: resolution: {integrity: sha512-R6X/VQOkwLTBGLp4VRUwLQZZVxZ9T9J8pGiJ6GQUMaRkY7TVWrCSkVfoNMM1/YyFsY5UYhhPoQe5IehnhZ3Pdw==} engines: {node: '>=16'} @@ -15439,45 +15404,30 @@ snapshots: '@cloudflare/workerd-darwin-64@1.20251210.0': optional: true - '@cloudflare/workerd-darwin-64@1.20260111.0': - optional: true - '@cloudflare/workerd-darwin-64@1.20260120.0': optional: true '@cloudflare/workerd-darwin-arm64@1.20251210.0': optional: true - '@cloudflare/workerd-darwin-arm64@1.20260111.0': - optional: true - '@cloudflare/workerd-darwin-arm64@1.20260120.0': optional: true '@cloudflare/workerd-linux-64@1.20251210.0': optional: true - '@cloudflare/workerd-linux-64@1.20260111.0': - optional: true - '@cloudflare/workerd-linux-64@1.20260120.0': optional: true '@cloudflare/workerd-linux-arm64@1.20251210.0': optional: true - '@cloudflare/workerd-linux-arm64@1.20260111.0': - optional: true - '@cloudflare/workerd-linux-arm64@1.20260120.0': optional: true '@cloudflare/workerd-windows-64@1.20251210.0': optional: true - '@cloudflare/workerd-windows-64@1.20260111.0': - optional: true - '@cloudflare/workerd-windows-64@1.20260120.0': optional: true @@ -25332,14 +25282,6 @@ snapshots: '@cloudflare/workerd-linux-arm64': 1.20251210.0 '@cloudflare/workerd-windows-64': 1.20251210.0 - workerd@1.20260111.0: - optionalDependencies: - '@cloudflare/workerd-darwin-64': 1.20260111.0 - '@cloudflare/workerd-darwin-arm64': 1.20260111.0 - '@cloudflare/workerd-linux-64': 1.20260111.0 - '@cloudflare/workerd-linux-arm64': 1.20260111.0 - '@cloudflare/workerd-windows-64': 1.20260111.0 - workerd@1.20260120.0: optionalDependencies: '@cloudflare/workerd-darwin-64': 1.20260120.0 From 614bbd709529191bbae6aa92790bbfe00a37e3d9 Mon Sep 17 00:00:00 2001 From: jbwcloudflare <90648441+jbwcloudflare@users.noreply.github.com> Date: Wed, 21 Jan 2026 11:48:34 -0500 Subject: [PATCH 2/4] =?UTF-8?q?Add=20support=20for=20increased=20Pages=20m?= =?UTF-8?q?ax=20asset=20limit=20in=20the=20pages=20validate=E2=80=A6=20(#1?= =?UTF-8?q?2030)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add support for increased Pages max asset limit in the pages validate command * Update .changeset/clever-cloths-push.md Co-authored-by: Edmund Hung --------- Co-authored-by: Edmund Hung --- .changeset/clever-cloths-push.md | 5 ++ .../__tests__/pages/project-validate.test.ts | 51 +++++++++++++++++++ packages/wrangler/src/pages/validate.ts | 6 +++ 3 files changed, 62 insertions(+) create mode 100644 .changeset/clever-cloths-push.md diff --git a/.changeset/clever-cloths-push.md b/.changeset/clever-cloths-push.md new file mode 100644 index 000000000000..4cdf1de1d95d --- /dev/null +++ b/.changeset/clever-cloths-push.md @@ -0,0 +1,5 @@ +--- +"wrangler": patch +--- + +Fix `wrangler pages project validate` to respect file count limits from `CF_PAGES_UPLOAD_JWT` diff --git a/packages/wrangler/src/__tests__/pages/project-validate.test.ts b/packages/wrangler/src/__tests__/pages/project-validate.test.ts index a1128282758c..af0d42be338c 100644 --- 
a/packages/wrangler/src/__tests__/pages/project-validate.test.ts +++ b/packages/wrangler/src/__tests__/pages/project-validate.test.ts @@ -83,4 +83,55 @@ describe("pages project validate", () => { "Error: Pages only supports up to 5 files in a deployment for your current plan. Ensure you have specified your build output directory correctly." ); }); + + it("should use fileCountLimit from CF_PAGES_UPLOAD_JWT when set", async () => { + // Create 11 files, which exceeds the mocked MAX_ASSET_COUNT_DEFAULT of 10 + for (let i = 0; i < 11; i++) { + writeFileSync(`logo${i}.png`, Buffer.alloc(1)); + } + + // Create a JWT with max_file_count_allowed: 20 + const jwt = + "header." + + Buffer.from(JSON.stringify({ max_file_count_allowed: 20 })).toString( + "base64" + ) + + ".signature"; + + vi.stubEnv("CF_PAGES_UPLOAD_JWT", jwt); + + // Should succeed because the JWT allows up to 20 files + await runWrangler("pages project validate ."); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ──────────────────" + `); + expect(std.err).toMatchInlineSnapshot(`""`); + }); + + it("should error when file count exceeds limit from CF_PAGES_UPLOAD_JWT", async () => { + // Create 6 files + for (let i = 0; i < 6; i++) { + writeFileSync(`logo${i}.png`, Buffer.alloc(1)); + } + + // Create a JWT with max_file_count_allowed: 5 + const jwt = + "header." + + Buffer.from(JSON.stringify({ max_file_count_allowed: 5 })).toString( + "base64" + ) + + ".signature"; + + vi.stubEnv("CF_PAGES_UPLOAD_JWT", jwt); + + // Should fail because we have 6 files but JWT only allows 5 + await expect(() => + runWrangler("pages project validate .") + ).rejects.toThrowErrorMatchingInlineSnapshot( + `[Error: Error: Pages only supports up to 5 files in a deployment for your current plan. Ensure you have specified your build output directory correctly.]` + ); + }); }); diff --git a/packages/wrangler/src/pages/validate.ts b/packages/wrangler/src/pages/validate.ts index 055227d4864a..1688bacd58a9 100644 --- a/packages/wrangler/src/pages/validate.ts +++ b/packages/wrangler/src/pages/validate.ts @@ -7,6 +7,7 @@ import prettyBytes from "pretty-bytes"; import { createCommand } from "../core/create-command"; import { MAX_ASSET_COUNT_DEFAULT, MAX_ASSET_SIZE } from "./constants"; import { hashFile } from "./hash"; +import { maxFileCountAllowedFromClaims } from "./upload"; export const pagesProjectValidateCommand = createCommand({ metadata: { @@ -31,8 +32,13 @@ export const pagesProjectValidateCommand = createCommand({ throw new FatalError("Must specify a directory.", 1); } + const fileCountLimit = process.env.CF_PAGES_UPLOAD_JWT + ? maxFileCountAllowedFromClaims(process.env.CF_PAGES_UPLOAD_JWT) + : undefined; + await validate({ directory, + fileCountLimit, }); }, }); From 7d8c59a27e05e24f4d824cb898d8a1e5884a4c6f Mon Sep 17 00:00:00 2001 From: Pete Bacon Darwin Date: Wed, 21 Jan 2026 17:05:54 +0000 Subject: [PATCH 3/4] Improve changeset review job (#12028) * ci: make changeset review non-blocking Claude will still post a review comment to the PR, but the workflow will no longer fail if issues are found. This allows human reviewers to decide whether to address the feedback. Also removed structured JSON output in favor of human-readable comments. 
* docs: clarify changeset length and examples guidelines - Changesets should be more than one sentence but less than three paragraphs (unless the change is very large) - Code examples are encouraged but not mandatory - Updated CI workflow prompt to align with new guidelines * ci: defer to author's choice on changeset version type Claude should accept the author's choice of patch/minor unless it's clearly incorrect, rather than being prescriptive. --- .changeset/README.md | 4 ++-- .github/workflows/changeset-review.yml | 14 ++------------ 2 files changed, 4 insertions(+), 14 deletions(-) diff --git a/.changeset/README.md b/.changeset/README.md index 73bc5ec9a1d5..d743fffba841 100644 --- a/.changeset/README.md +++ b/.changeset/README.md @@ -37,7 +37,7 @@ pnpm changeset ``` - **TITLE**: A single sentence with an imperative description of the change -- **BODY**: One or more paragraphs explaining the reason for the change and anything notable about the approach +- **BODY**: One or more paragraphs explaining the reason for the change and anything notable about the approach. Aim for more than one sentence but less than three paragraphs to keep it succinct and useful. Larger changes may warrant more detail. ### Good Examples @@ -89,7 +89,7 @@ The changelog uses h3 for section headers, so any headers in changeset content m ### Code Examples -For new features or significant changes, include a brief usage example. This helps users understand how to use the new functionality. +For new features or significant changes, consider including a brief usage example. Examples can be helpful for users to understand new functionality, but they are not mandatory—use your judgment based on how self-explanatory the change is. ## Multiple Changesets diff --git a/.github/workflows/changeset-review.yml b/.github/workflows/changeset-review.yml index 7ee2be8372e2..445b37537517 100644 --- a/.github/workflows/changeset-review.yml +++ b/.github/workflows/changeset-review.yml @@ -59,8 +59,8 @@ jobs: For regular PRs, review: ${{ steps.changed-changesets.outputs.added_files }} Read `.changeset/README.md` for guidelines, then validate: - 1. **Version Type**: Correct patch/minor/major (major forbidden for wrangler) - 2. **Changelog Quality**: Meaningful descriptions with examples for features + 1. **Version Type**: Accept the author's choice of patch/minor unless clearly incorrect + 2. **Changelog Quality**: Meaningful descriptions (examples encouraged but not required for features) 3. **Markdown Headers**: No h1/h2/h3 headers (breaks changelog formatting) 4. **Analytics**: If the change collects more analytics, it should be a minor even though there is no user-visible change 5. **Dependabot**: Do not validate dependency update changesets for create-cloudflare @@ -72,20 +72,10 @@ jobs: If there are issues, output "⚠️ Issues found" followed by the specific problems. If the user has attached an image, use the Read tool to view it. If and only if it's a cute animal, include in your comment a short cuteness report in the style of WeRateDogs (e.g., "This is Barkley. He's wearing a tiny hat and doesn't know why. 13/10"). If it's not an animal, silently ignore the image. If there is more than one image, check each one to find an animal. If you do not find an animal, do not include any reference to a cuteness report. - - Return your structured output indicating whether there are blocking issues. 
          claude_args: |
            --allowedTools "Read,Glob,Grep"
-            --output-format json
-            --json-schema '{"type":"object","properties":{"has_blocking_issues":{"type":"boolean","description":"True if any changeset has blocking issues that should fail the check"},"summary":{"type":"string","description":"One-line summary of the review"},"issues":{"type":"array","items":{"type":"object","properties":{"file":{"type":"string"},"issue":{"type":"string"},"severity":{"type":"string","enum":["blocking","warning"]}}}}},"required":["has_blocking_issues","summary"]}'
 
       - name: Skip notice
         if: github.event.pull_request.title != 'Version Packages' && steps.changed-changesets.outputs.added_files_count == 0
         run: |
           echo "No new changesets to review (only minor edits to pre-existing changesets detected)"
-
-      - name: Fail if blocking issues found
-        if: always() && steps.claude-review.outputs.structured_output != '' && fromJSON(steps.claude-review.outputs.structured_output).has_blocking_issues == true
-        run: |
-          echo "::error::Blocking changeset issues found. Please fix the issues identified in the Claude review comment."
-          exit 1

From f9e8a452fb299e6cb1a0ff2985347bfc277deac8 Mon Sep 17 00:00:00 2001
From: Ben <4991309+NuroDev@users.noreply.github.com>
Date: Wed, 21 Jan 2026 18:02:57 +0000
Subject: [PATCH 4/4] Generate types for all bindings across all environments
 (#11893)

* Added initial binding type aggregation fix
* Minor refactoring
* Added aggregate binding types changeset
* temporarily disable media binding test (#11900)
* Updated changeset description
* Added JSDoc description to `generateEnvTypes`
* Minor type generation refactoring
* Minor `collectEnvironmentVars` refactoring
* Added `getEnvConfig` helper to handle environment config validation
* Minor `generateEnvTypes` refactoring
* Removed optional syntax from JSDoc parameters
* Renamed `collectAllBindings` to `collectCoreBindings`
* Fixed CI test inline snapshots
* Fixed invalid config parsing errors
* Minor refactoring
* Added basic unit tests for `throwMissingBindingError`
* Overhauled binding type aggregation logic + added scoped environment env interfaces
* Added changeset
* Minor type generation test cleanup
* Minor type generation test cleanup
* Minor JSDoc tweaks
* Minor type generation logic cleanup
* Minor refactoring test fixes
* Minor JSDoc fixes
* Added `getEnvHeader` helper
* Cleaned up `getEnvHeader` usage
* Added basic unit tests for `getEnvHeader`
* Renamed `checkPath` to `validateTypesFile`
* Improved JSDoc comments for `validateTypesFile`
* Refactored `throwMissingBindingError` to use options parameter
* Updated `index` option to make it optional
* Replaced `Array` with `string[]`
* Unified top-level sentinel key
* Add basic example to changeset

---------

Co-authored-by: emily-shen <69125074+emily-shen@users.noreply.github.com>
---
 .changeset/tall-hairs-send.md                 |   56 +
 .../src/__tests__/type-generation.test.ts     |  984 +++++-
 .../wrangler/src/type-generation/helpers.ts   |  163 +-
 .../wrangler/src/type-generation/index.ts     | 2706 +++++++++++++++--
 4 files changed, 3506 insertions(+), 403 deletions(-)
 create mode 100644 .changeset/tall-hairs-send.md

diff --git a/.changeset/tall-hairs-send.md b/.changeset/tall-hairs-send.md
new file mode 100644
index 000000000000..c4e8d519d2bc
--- /dev/null
+++ b/.changeset/tall-hairs-send.md
@@ -0,0 +1,56 @@
+---
+"wrangler": minor
+---
+
+`wrangler types` now generates per-environment TypeScript interfaces when named environments exist in your configuration. 
+ +When your configuration has named environments (an `env` object), `wrangler types` now generates both: + +- **Per-environment interfaces** (e.g., `StagingEnv`, `ProductionEnv`) containing only the bindings explicitly declared in each environment, plus inherited secrets +- **An aggregated `Env` interface** with all bindings from all environments (top-level + named environments), where: + - Bindings present in **all** environments are required + - Bindings not present in all environments are optional + - Secrets are always required (since they're inherited everywhere) + - Conflicting binding types across environments produce union types (e.g., `KVNamespace | R2Bucket`) + +However, if your config does not contain any environments, or you manually specify an environment via `--env`, `wrangler types` will continue to generate a single interface as before. + +**Example:** + +Given the following `wrangler.jsonc`: + +```jsonc +{ + "name": "my-worker", + "kv_namespaces": [ + { + "binding": "SHARED_KV", + "id": "abc123", + }, + ], + "env": { + "staging": { + "kv_namespaces": [ + { "binding": "SHARED_KV", "id": "staging-kv" }, + { "binding": "STAGING_CACHE", "id": "staging-cache" }, + ], + }, + }, +} +``` + +Running `wrangler types` will generate: + +```ts +declare namespace Cloudflare { + interface StagingEnv { + SHARED_KV: KVNamespace; + STAGING_CACHE: KVNamespace; + } + interface Env { + SHARED_KV: KVNamespace; // Required: in all environments + STAGING_CACHE?: KVNamespace; // Optional: only in staging + } +} +interface Env extends Cloudflare.Env {} +``` diff --git a/packages/wrangler/src/__tests__/type-generation.test.ts b/packages/wrangler/src/__tests__/type-generation.test.ts index ea83db91a66e..d439840ace09 100644 --- a/packages/wrangler/src/__tests__/type-generation.test.ts +++ b/packages/wrangler/src/__tests__/type-generation.test.ts @@ -14,6 +14,15 @@ import { generateImportSpecifier, isValidIdentifier, } from "../type-generation"; +import { + ENV_HEADER_COMMENT_PREFIX, + getEnvHeader, + throwMissingBindingError, + toEnvInterfaceName, + TOP_LEVEL_ENV_NAME, + toPascalCase, + validateEnvInterfaceNames, +} from "../type-generation/helpers"; import * as generateRuntime from "../type-generation/runtime"; import { dedent } from "../utils/dedent"; import { mockConsoleMethods } from "./helpers/mock-console"; @@ -92,6 +101,192 @@ describe("generateImportSpecifier", () => { }); }); +describe("getEnvHeader", () => { + it("should generate a header with the provided hash and command", () => { + const result = getEnvHeader("abc123", "wrangler types"); + expect(result).toBe( + `${ENV_HEADER_COMMENT_PREFIX} \`wrangler types\` (hash: abc123)` + ); + }); + + it("should include complex commands with flags", () => { + const result = getEnvHeader( + "def456", + "wrangler types --strict-vars=false --env-interface=MyEnv" + ); + expect(result).toBe( + `${ENV_HEADER_COMMENT_PREFIX} \`wrangler types --strict-vars=false --env-interface=MyEnv\` (hash: def456)` + ); + }); + + it("should handle empty hash", () => { + const result = getEnvHeader("", "wrangler types"); + expect(result).toBe( + `${ENV_HEADER_COMMENT_PREFIX} \`wrangler types\` (hash: )` + ); + }); + + it("should use process.argv when command is not provided", () => { + const originalArgv = process.argv; + process.argv = ["node", "wrangler", "types", "--include-runtime=false"]; + + try { + const result = getEnvHeader("xyz789"); + expect(result).toBe( + `${ENV_HEADER_COMMENT_PREFIX} \`wrangler types --include-runtime=false\` (hash: xyz789)` + ); + } 
finally { + process.argv = originalArgv; + } + }); +}); + +describe("toPascalCase", () => { + it("should convert simple strings to PascalCase", () => { + expect(toPascalCase("staging")).toBe("Staging"); + expect(toPascalCase("production")).toBe("Production"); + }); + + it("should convert kebab-case to PascalCase", () => { + expect(toPascalCase("my-prod-env")).toBe("MyProdEnv"); + expect(toPascalCase("staging-env")).toBe("StagingEnv"); + }); + + it("should convert snake_case to PascalCase", () => { + expect(toPascalCase("my_test_env")).toBe("MyTestEnv"); + expect(toPascalCase("prod_env")).toBe("ProdEnv"); + }); + + it("should handle mixed separators", () => { + expect(toPascalCase("my-test_env")).toBe("MyTestEnv"); + }); +}); + +describe("toEnvInterfaceName", () => { + it("should add Env suffix to environment names", () => { + expect(toEnvInterfaceName("staging")).toBe("StagingEnv"); + expect(toEnvInterfaceName("production")).toBe("ProductionEnv"); + }); + + it("should deduplicate Env suffix", () => { + expect(toEnvInterfaceName("staging-env")).toBe("StagingEnv"); + expect(toEnvInterfaceName("prod-env")).toBe("ProdEnv"); + expect(toEnvInterfaceName("my_env")).toBe("MyEnv"); + }); + + it("should handle kebab-case environment names", () => { + expect(toEnvInterfaceName("my-prod")).toBe("MyProdEnv"); + expect(toEnvInterfaceName("test-staging")).toBe("TestStagingEnv"); + }); +}); + +describe("validateEnvInterfaceNames", () => { + it("should not throw for valid, unique environment names", () => { + expect(() => + validateEnvInterfaceNames(["staging", "production", "dev"]) + ).not.toThrow(); + }); + + it("should throw for reserved name Env", () => { + expect(() => validateEnvInterfaceNames(["env"])).toThrowError( + /Environment name "env" converts to reserved interface name "Env"/ + ); + }); + + it("should throw when two environment names convert to the same interface name", () => { + // Both staging-env and staging_env convert to StagingEnv + expect(() => + validateEnvInterfaceNames(["staging-env", "staging_env"]) + ).toThrowError( + /Environment names "staging-env" and "staging_env" both convert to interface name "StagingEnv"/ + ); + }); + + it("should throw when names with different separators collide", () => { + expect(() => + validateEnvInterfaceNames(["my-prod", "my_prod"]) + ).toThrowError( + /Environment names "my-prod" and "my_prod" both convert to interface name "MyProdEnv"/ + ); + }); +}); + +describe("throwMissingBindingError", () => { + it("should throw a `UserError` for top-level bindings with array index", () => { + expect(() => + throwMissingBindingError({ + binding: { id: "1234" }, + bindingType: "kv_namespaces", + configPath: "wrangler.json", + envName: TOP_LEVEL_ENV_NAME, + fieldName: "binding", + index: 0, + }) + ).toThrowError( + 'Processing wrangler.json configuration:\n - "kv_namespaces[0]" bindings should have a string "binding" field but got {"id":"1234"}.' + ); + }); + + it("should throw a `UserError` for environment bindings with array index", () => { + expect(() => + throwMissingBindingError({ + binding: { database_id: "abc123" }, + bindingType: "d1_databases", + configPath: "wrangler.json", + envName: "production", + fieldName: "binding", + index: 2, + }) + ).toThrowError( + 'Processing wrangler.json configuration:\n - "env.production" environment configuration\n - "env.production.d1_databases[2]" bindings should have a string "binding" field but got {"database_id":"abc123"}.' 
+ ); + }); + + it("should handle non-array bindings (index omitted)", () => { + expect(() => + throwMissingBindingError({ + binding: {}, + bindingType: "ai", + configPath: "wrangler.json", + envName: TOP_LEVEL_ENV_NAME, + fieldName: "binding", + }) + ).toThrowError( + 'Processing wrangler.json configuration:\n - "ai" bindings should have a string "binding" field but got {}.' + ); + }); + + it("should handle undefined config path", () => { + expect(() => + throwMissingBindingError({ + binding: {}, + bindingType: "kv_namespaces", + configPath: undefined, + envName: TOP_LEVEL_ENV_NAME, + fieldName: "binding", + index: 0, + }) + ).toThrowError( + 'Processing Wrangler configuration configuration:\n - "kv_namespaces[0]" bindings should have a string "binding" field but got {}.' + ); + }); + + it("should handle different field names", () => { + expect(() => + throwMissingBindingError({ + binding: { type: "ratelimit" }, + bindingType: "unsafe", + configPath: "wrangler.json", + envName: "staging", + fieldName: "name", + index: 1, + }) + ).toThrowError( + 'Processing wrangler.json configuration:\n - "env.staging" environment configuration\n - "env.staging.unsafe[1]" bindings should have a string "name" field but got {"type":"ratelimit"}.' + ); + }); +}); + const bindingsConfigMock: Omit< EnvironmentNonInheritable, "define" | "tail_consumers" | "cloudchamber" @@ -512,6 +707,28 @@ describe("generate types", () => { } interface Env { TEST_KV_NAMESPACE: KVNamespace; + R2_BUCKET_BINDING: R2Bucket; + D1_TESTING_SOMETHING: D1Database; + VECTORIZE_BINDING: VectorizeIndex; + HYPERDRIVE_BINDING: Hyperdrive; + SEND_EMAIL_BINDING: SendEmail; + AE_DATASET_BINDING: AnalyticsEngineDataset; + NAMESPACE_BINDING: DispatchNamespace; + MTLS_BINDING: Fetcher; + TEST_QUEUE_BINDING: Queue; + SECRET: SecretsStoreSecret; + HELLO_WORLD: HelloWorldBinding; + RATE_LIMITER: RateLimit; + WORKER_LOADER_BINDING: WorkerLoader; + VPC_SERVICE_BINDING: Fetcher; + PIPELINE: import(\\"cloudflare:pipelines\\").Pipeline; + LOGFWDR_SCHEMA: any; + BROWSER_BINDING: Fetcher; + AI_BINDING: Ai; + IMAGES_BINDING: ImagesBinding; + MEDIA_BINDING: MediaBinding; + VERSION_METADATA_BINDING: WorkerVersionMetadata; + ASSETS_BINDING: Fetcher; SOMETHING: \\"asdasdfasdf\\"; ANOTHER: \\"thing\\"; OBJECT_VAR: {\\"enterprise\\":\\"1701-D\\",\\"activeDuty\\":true,\\"captain\\":\\"Picard\\"}; @@ -521,38 +738,16 @@ describe("generate types", () => { DURABLE_NO_EXPORT: DurableObjectNamespace /* DurableNoexport */; DURABLE_EXTERNAL_UNKNOWN_ENTRY: DurableObjectNamespace /* DurableExternal from external-worker */; DURABLE_EXTERNAL_PROVIDED_ENTRY: DurableObjectNamespace /* RealDurableExternal from service_name_2 */; - R2_BUCKET_BINDING: R2Bucket; - D1_TESTING_SOMETHING: D1Database; - SECRET: SecretsStoreSecret; - HELLO_WORLD: HelloWorldBinding; - RATE_LIMITER: RateLimit; - WORKER_LOADER_BINDING: WorkerLoader; SERVICE_BINDING: Fetcher /* service_name */; OTHER_SERVICE_BINDING: Service /* entrypoint FakeEntrypoint from service_name_2 */; OTHER_SERVICE_BINDING_ENTRYPOINT: Service /* entrypoint RealEntrypoint from service_name_2 */; - AE_DATASET_BINDING: AnalyticsEngineDataset; - NAMESPACE_BINDING: DispatchNamespace; - LOGFWDR_SCHEMA: any; + MY_WORKFLOW: Workflow[0]['payload']>; + testing_unsafe: any; + UNSAFE_RATELIMIT: RateLimit; SOME_DATA_BLOB1: ArrayBuffer; SOME_DATA_BLOB2: ArrayBuffer; SOME_TEXT_BLOB1: string; SOME_TEXT_BLOB2: string; - testing_unsafe: any; - UNSAFE_RATELIMIT: RateLimit; - TEST_QUEUE_BINDING: Queue; - SEND_EMAIL_BINDING: SendEmail; - 
VECTORIZE_BINDING: VectorizeIndex; - HYPERDRIVE_BINDING: Hyperdrive; - VPC_SERVICE_BINDING: Fetcher; - MTLS_BINDING: Fetcher; - BROWSER_BINDING: Fetcher; - AI_BINDING: Ai; - IMAGES_BINDING: ImagesBinding; - MEDIA_BINDING: MediaBinding; - VERSION_METADATA_BINDING: WorkerVersionMetadata; - ASSETS_BINDING: Fetcher; - MY_WORKFLOW: Workflow[0]['payload']>; - PIPELINE: import(\\"cloudflare:pipelines\\").Pipeline; } } interface Env extends Cloudflare.Env {} @@ -622,6 +817,28 @@ describe("generate types", () => { } interface Env { TEST_KV_NAMESPACE: KVNamespace; + R2_BUCKET_BINDING: R2Bucket; + D1_TESTING_SOMETHING: D1Database; + VECTORIZE_BINDING: VectorizeIndex; + HYPERDRIVE_BINDING: Hyperdrive; + SEND_EMAIL_BINDING: SendEmail; + AE_DATASET_BINDING: AnalyticsEngineDataset; + NAMESPACE_BINDING: DispatchNamespace; + MTLS_BINDING: Fetcher; + TEST_QUEUE_BINDING: Queue; + SECRET: SecretsStoreSecret; + HELLO_WORLD: HelloWorldBinding; + RATE_LIMITER: RateLimit; + WORKER_LOADER_BINDING: WorkerLoader; + VPC_SERVICE_BINDING: Fetcher; + PIPELINE: import(\\"cloudflare:pipelines\\").Pipeline; + LOGFWDR_SCHEMA: any; + BROWSER_BINDING: Fetcher; + AI_BINDING: Ai; + IMAGES_BINDING: ImagesBinding; + MEDIA_BINDING: MediaBinding; + VERSION_METADATA_BINDING: WorkerVersionMetadata; + ASSETS_BINDING: Fetcher; SOMETHING: \\"asdasdfasdf\\"; ANOTHER: \\"thing\\"; OBJECT_VAR: {\\"enterprise\\":\\"1701-D\\",\\"activeDuty\\":true,\\"captain\\":\\"Picard\\"}; @@ -632,38 +849,16 @@ describe("generate types", () => { DURABLE_NO_EXPORT: DurableObjectNamespace /* DurableNoexport */; DURABLE_EXTERNAL_UNKNOWN_ENTRY: DurableObjectNamespace /* DurableExternal from external-worker */; DURABLE_EXTERNAL_PROVIDED_ENTRY: DurableObjectNamespace /* RealDurableExternal from service_name_2 */; - R2_BUCKET_BINDING: R2Bucket; - D1_TESTING_SOMETHING: D1Database; - SECRET: SecretsStoreSecret; - HELLO_WORLD: HelloWorldBinding; - RATE_LIMITER: RateLimit; - WORKER_LOADER_BINDING: WorkerLoader; SERVICE_BINDING: Fetcher /* service_name */; OTHER_SERVICE_BINDING: Service /* entrypoint FakeEntrypoint from service_name_2 */; OTHER_SERVICE_BINDING_ENTRYPOINT: Service /* entrypoint RealEntrypoint from service_name_2 */; - AE_DATASET_BINDING: AnalyticsEngineDataset; - NAMESPACE_BINDING: DispatchNamespace; - LOGFWDR_SCHEMA: any; + MY_WORKFLOW: Workflow[0]['payload']>; + testing_unsafe: any; + UNSAFE_RATELIMIT: RateLimit; SOME_DATA_BLOB1: ArrayBuffer; SOME_DATA_BLOB2: ArrayBuffer; SOME_TEXT_BLOB1: string; SOME_TEXT_BLOB2: string; - testing_unsafe: any; - UNSAFE_RATELIMIT: RateLimit; - TEST_QUEUE_BINDING: Queue; - SEND_EMAIL_BINDING: SendEmail; - VECTORIZE_BINDING: VectorizeIndex; - HYPERDRIVE_BINDING: Hyperdrive; - VPC_SERVICE_BINDING: Fetcher; - MTLS_BINDING: Fetcher; - BROWSER_BINDING: Fetcher; - AI_BINDING: Ai; - IMAGES_BINDING: ImagesBinding; - MEDIA_BINDING: MediaBinding; - VERSION_METADATA_BINDING: WorkerVersionMetadata; - ASSETS_BINDING: Fetcher; - MY_WORKFLOW: Workflow[0]['payload']>; - PIPELINE: import(\\"cloudflare:pipelines\\").Pipeline; } } interface Env extends Cloudflare.Env {} @@ -797,6 +992,28 @@ describe("generate types", () => { } interface Env { TEST_KV_NAMESPACE: KVNamespace; + R2_BUCKET_BINDING: R2Bucket; + D1_TESTING_SOMETHING: D1Database; + VECTORIZE_BINDING: VectorizeIndex; + HYPERDRIVE_BINDING: Hyperdrive; + SEND_EMAIL_BINDING: SendEmail; + AE_DATASET_BINDING: AnalyticsEngineDataset; + NAMESPACE_BINDING: DispatchNamespace; + MTLS_BINDING: Fetcher; + TEST_QUEUE_BINDING: Queue; + SECRET: SecretsStoreSecret; + HELLO_WORLD: 
HelloWorldBinding; + RATE_LIMITER: RateLimit; + WORKER_LOADER_BINDING: WorkerLoader; + VPC_SERVICE_BINDING: Fetcher; + PIPELINE: import(\\"cloudflare:pipelines\\").Pipeline; + LOGFWDR_SCHEMA: any; + BROWSER_BINDING: Fetcher; + AI_BINDING: Ai; + IMAGES_BINDING: ImagesBinding; + MEDIA_BINDING: MediaBinding; + VERSION_METADATA_BINDING: WorkerVersionMetadata; + ASSETS_BINDING: Fetcher; SOMETHING: \\"asdasdfasdf\\"; ANOTHER: \\"thing\\"; OBJECT_VAR: {\\"enterprise\\":\\"1701-D\\",\\"activeDuty\\":true,\\"captain\\":\\"Picard\\"}; @@ -807,38 +1024,16 @@ describe("generate types", () => { DURABLE_NO_EXPORT: DurableObjectNamespace /* DurableNoexport */; DURABLE_EXTERNAL_UNKNOWN_ENTRY: DurableObjectNamespace /* DurableExternal from external-worker */; DURABLE_EXTERNAL_PROVIDED_ENTRY: DurableObjectNamespace; - R2_BUCKET_BINDING: R2Bucket; - D1_TESTING_SOMETHING: D1Database; - SECRET: SecretsStoreSecret; - HELLO_WORLD: HelloWorldBinding; - RATE_LIMITER: RateLimit; - WORKER_LOADER_BINDING: WorkerLoader; SERVICE_BINDING: Service; OTHER_SERVICE_BINDING: Service /* entrypoint FakeEntrypoint from service_name_2 */; OTHER_SERVICE_BINDING_ENTRYPOINT: Service; - AE_DATASET_BINDING: AnalyticsEngineDataset; - NAMESPACE_BINDING: DispatchNamespace; - LOGFWDR_SCHEMA: any; + MY_WORKFLOW: Workflow[0]['payload']>; + testing_unsafe: any; + UNSAFE_RATELIMIT: RateLimit; SOME_DATA_BLOB1: ArrayBuffer; SOME_DATA_BLOB2: ArrayBuffer; SOME_TEXT_BLOB1: string; SOME_TEXT_BLOB2: string; - testing_unsafe: any; - UNSAFE_RATELIMIT: RateLimit; - TEST_QUEUE_BINDING: Queue; - SEND_EMAIL_BINDING: SendEmail; - VECTORIZE_BINDING: VectorizeIndex; - HYPERDRIVE_BINDING: Hyperdrive; - VPC_SERVICE_BINDING: Fetcher; - MTLS_BINDING: Fetcher; - BROWSER_BINDING: Fetcher; - AI_BINDING: Ai; - IMAGES_BINDING: ImagesBinding; - MEDIA_BINDING: MediaBinding; - VERSION_METADATA_BINDING: WorkerVersionMetadata; - ASSETS_BINDING: Fetcher; - MY_WORKFLOW: Workflow[0]['payload']>; - PIPELINE: import(\\"cloudflare:pipelines\\").Pipeline; } } interface Env extends Cloudflare.Env {} @@ -1501,11 +1696,20 @@ describe("generate types", () => { Generating project types... declare namespace Cloudflare { - interface Env { + interface ProductionEnv { MY_VAR: \\"a var\\"; - MY_VAR_A: \\"A (dev)\\" | \\"A (prod)\\" | \\"A (stag)\\"; - MY_VAR_B: {\\"value\\":\\"B (dev)\\"} | {\\"value\\":\\"B (prod)\\"}; - MY_VAR_C: [\\"a\\",\\"b\\",\\"c\\"] | [1,2,3]; + MY_VAR_A: \\"A (prod)\\"; + MY_VAR_B: {\\"value\\":\\"B (prod)\\"}; + MY_VAR_C: [1,2,3]; + } + interface StagingEnv { + MY_VAR_A: \\"A (stag)\\"; + } + interface Env { + MY_VAR?: \\"a var\\"; + MY_VAR_A: \\"A (prod)\\" | \\"A (stag)\\" | \\"A (dev)\\"; + MY_VAR_B?: {\\"value\\":\\"B (prod)\\"} | {\\"value\\":\\"B (dev)\\"}; + MY_VAR_C?: [1,2,3] | [\\"a\\",\\"b\\",\\"c\\"]; } } interface Env extends Cloudflare.Env {} @@ -1528,11 +1732,20 @@ describe("generate types", () => { Generating project types... 
declare namespace Cloudflare { - interface Env { + interface ProductionEnv { MY_VAR: string; MY_VAR_A: string; MY_VAR_B: object; - MY_VAR_C: string[] | number[]; + MY_VAR_C: number[]; + } + interface StagingEnv { + MY_VAR_A: string; + } + interface Env { + MY_VAR?: string; + MY_VAR_A: string; + MY_VAR_B?: object; + MY_VAR_C?: number[] | string[]; } } interface Env extends Cloudflare.Env {} @@ -1546,6 +1759,615 @@ describe("generate types", () => { }); }); + describe("bindings present in multiple environments", () => { + it("should collect bindings from all environments when no --env is specified", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + d1_databases: [ + { + binding: "D1_TOP", + database_id: "top-d1-id", + database_name: "top", + }, + ], + kv_namespaces: [ + { + binding: "KV_TOP", + id: "top-kv-id", + }, + ], + env: { + staging: { + d1_databases: [ + { + binding: "D1_STAGING", + database_id: "staging-d1-id", + database_name: "staging", + }, + ], + kv_namespaces: [ + { + binding: "KV_STAGING", + id: "staging-kv-id", + }, + ], + }, + production: { + kv_namespaces: [ + { + binding: "KV_PROD", + id: "prod-kv-id", + }, + ], + r2_buckets: [ + { + binding: "R2_PROD", + bucket_name: "prod-bucket", + }, + ], + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + KV_STAGING: KVNamespace; + D1_STAGING: D1Database; + } + interface ProductionEnv { + KV_PROD: KVNamespace; + R2_PROD: R2Bucket; + } + interface Env { + KV_STAGING?: KVNamespace; + D1_STAGING?: D1Database; + KV_PROD?: KVNamespace; + R2_PROD?: R2Bucket; + KV_TOP?: KVNamespace; + D1_TOP?: D1Database; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should only collect bindings from the specified environment when --env is used", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [ + { + binding: "KV_TOP", + id: "top-kv-id", + }, + ], + env: { + staging: { + d1_databases: [ + { + binding: "D1_STAGING", + database_id: "staging-d1-id", + database_name: "staging", + }, + ], + kv_namespaces: [ + { + binding: "KV_STAGING", + id: "staging-kv-id", + }, + ], + }, + production: { + kv_namespaces: [ + { + binding: "KV_PROD", + id: "prod-kv-id", + }, + ], + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false --env staging"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface Env { + KV_STAGING: KVNamespace; + D1_STAGING: D1Database; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. 
+ " + `); + }); + + it("should deduplicate bindings with the same name across environments", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [ + { + binding: "MY_KV", + id: "top-kv-id", + }, + ], + env: { + staging: { + kv_namespaces: [ + { + binding: "MY_KV", + id: "staging-kv-id", + }, + ], + }, + production: { + kv_namespaces: [ + { + binding: "MY_KV", + id: "prod-kv-id", + }, + ], + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + MY_KV: KVNamespace; + } + interface ProductionEnv { + MY_KV: KVNamespace; + } + interface Env { + MY_KV: KVNamespace; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should produce union types when binding name has different types across environments", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [ + { + binding: "CACHE", + id: "kv-id", + }, + ], + env: { + staging: { + r2_buckets: [ + { + binding: "CACHE", + bucket_name: "r2-bucket", + }, + ], + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + CACHE: R2Bucket; + } + interface Env { + CACHE: R2Bucket | KVNamespace; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. 
+ " + `); + }); + + it("should error when a binding is missing its binding name in an environment", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [ + { + binding: "KV_TOP", + id: "top-kv-id", + }, + ], + env: { + staging: { + // Empty object with no binding property should error + kv_namespaces: [{}], + }, + }, + }), + "utf-8" + ); + + await expect( + runWrangler("types --include-runtime=false") + ).rejects.toThrowError( + /Processing wrangler\.jsonc configuration:\n\s+- "env\.staging" environment configuration\n\s+- "env\.staging\.kv_namespaces\[0\]" bindings should have a string "binding" field but got \{\}/ + ); + }); + + it("should error when a binding is missing its binding name at top-level", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + r2_buckets: [ + { + bucket_name: "my-bucket", + }, + ], + }), + "utf-8" + ); + + await expect( + runWrangler("types --include-runtime=false") + ).rejects.toThrowError( + /Processing wrangler\.jsonc configuration:\n\s+- "r2_buckets\[0\]" bindings should have a string "binding" field/ + ); + }); + + it("should collect vars only from specified environment with --env", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + vars: { + MY_VAR: "top-level", + }, + env: { + staging: { + vars: { + MY_VAR: "staging", + STAGING_ONLY: "staging-only-value", + }, + }, + production: { + vars: { + MY_VAR: "production", + PROD_ONLY: "prod-only-value", + }, + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false --env staging"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface Env { + MY_VAR: \\"staging\\"; + STAGING_ONLY: \\"staging-only-value\\"; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should mark bindings as optional if not present in all environments", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [{ binding: "KV_SHARED", id: "top-kv" }], + env: { + staging: { + kv_namespaces: [ + { binding: "KV_SHARED", id: "staging-kv" }, + { binding: "KV_STAGING_ONLY", id: "staging-only" }, + ], + }, + production: { + kv_namespaces: [ + { binding: "KV_SHARED", id: "prod-kv" }, + { binding: "KV_PROD_ONLY", id: "prod-only" }, + ], + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + KV_SHARED: KVNamespace; + KV_STAGING_ONLY: KVNamespace; + } + interface ProductionEnv { + KV_SHARED: KVNamespace; + KV_PROD_ONLY: KVNamespace; + } + interface Env { + KV_SHARED: KVNamespace; + KV_STAGING_ONLY?: KVNamespace; + KV_PROD_ONLY?: KVNamespace; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. 
+ " + `); + }); + + it("should not include top-level bindings in per-environment interfaces", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [{ binding: "KV_TOP_ONLY", id: "top-kv" }], + env: { + staging: { + d1_databases: [ + { + binding: "D1_STAGING", + database_id: "staging-d1", + database_name: "staging", + }, + ], + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + D1_STAGING: D1Database; + } + interface Env { + D1_STAGING?: D1Database; + KV_TOP_ONLY?: KVNamespace; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should include secrets in per-environment interfaces since they are inherited", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + env: { + staging: { + kv_namespaces: [{ binding: "KV_STAGING", id: "staging-kv" }], + }, + }, + }), + "utf-8" + ); + + fs.writeFileSync("./.dev.vars", "MY_SECRET=secret-value\n", "utf-8"); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + KV_STAGING: KVNamespace; + MY_SECRET: string; + } + interface Env { + MY_SECRET: string; + KV_STAGING?: KVNamespace; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should produce union types for vars with different values across environments", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + vars: { DEBUG: "false" }, + env: { + staging: { + vars: { DEBUG: "true" }, + }, + production: { + vars: { DEBUG: "false" }, + }, + }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... + + declare namespace Cloudflare { + interface StagingEnv { + DEBUG: \\"true\\"; + } + interface ProductionEnv { + DEBUG: \\"false\\"; + } + interface Env { + DEBUG: \\"true\\" | \\"false\\"; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should generate simple Env when no environments are defined", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + kv_namespaces: [{ binding: "MY_KV", id: "kv-id" }], + vars: { MY_VAR: "value" }, + }), + "utf-8" + ); + + await runWrangler("types --include-runtime=false"); + + expect(std.out).toMatchInlineSnapshot(` + " + ⛅️ wrangler x.x.x + ────────────────── + Generating project types... 
+ + declare namespace Cloudflare { + interface Env { + MY_KV: KVNamespace; + MY_VAR: \\"value\\"; + } + } + interface Env extends Cloudflare.Env {} + + ──────────────────────────────────────────────────────────── + ✨ Types written to worker-configuration.d.ts + + 📣 Remember to rerun 'wrangler types' after you change your wrangler.jsonc file. + " + `); + }); + + it("should error when environment names would collide after conversion", async () => { + fs.writeFileSync( + "./wrangler.jsonc", + JSON.stringify({ + env: { + "my-env": { + vars: { A: "a" }, + }, + my_env: { + vars: { B: "b" }, + }, + }, + }), + "utf-8" + ); + + await expect( + runWrangler("types --include-runtime=false") + ).rejects.toThrowError( + /Environment names "my-env" and "my_env" both convert to interface name "MyEnv"/ + ); + }); + }); + describe("customization", () => { describe("env", () => { it("should allow the user to customize the interface name", async () => { diff --git a/packages/wrangler/src/type-generation/helpers.ts b/packages/wrangler/src/type-generation/helpers.ts index 4e174c019e42..9cf85d183843 100644 --- a/packages/wrangler/src/type-generation/helpers.ts +++ b/packages/wrangler/src/type-generation/helpers.ts @@ -1,5 +1,9 @@ import { readFileSync, writeFileSync } from "node:fs"; -import { ParseError, UserError } from "@cloudflare/workers-utils"; +import { + configFileName, + ParseError, + UserError, +} from "@cloudflare/workers-utils"; import { version } from "workerd"; import yargs from "yargs"; import { getEntry } from "../deployment-bundle/entry"; @@ -15,6 +19,76 @@ export const ENV_HEADER_COMMENT_PREFIX = "// Generated by Wrangler by running"; export const RUNTIME_HEADER_COMMENT_PREFIX = "// Runtime types generated with workerd@"; +/** + * Sentinel value used to identify top-level (non-environment) bindings when collecting bindings. + */ +export const TOP_LEVEL_ENV_NAME = "$top-level"; + +export interface ThrowMissingBindingErrorOptions { + /** The actual binding object for error context */ + binding: unknown; + /** The type of binding (e.g., "kv_namespaces", "d1_databases") */ + bindingType: string; + /** The path to the config file */ + configPath: string | undefined; + /** The environment name where the invalid binding was found */ + envName: string; + /** The name of the missing field (e.g., "binding", "name") */ + fieldName: string; + /** The index of the binding in the array (0-based), or omit for non-array bindings */ + index?: number; +} + +/** + * Throws a UserError when a binding is missing its required property. + * + * The error format matches the existing config validation errors for consistency. + * + * @param options - The options for constructing the error message + * + * @throws {UserError} Always throws with a formatted error message + */ +export const throwMissingBindingError = ({ + binding, + bindingType, + configPath, + envName, + fieldName, + index, +}: ThrowMissingBindingErrorOptions): never => { + const isArrayBinding = index !== undefined; + const bindingPath = isArrayBinding ? `${bindingType}[${index}]` : bindingType; + const isTopLevel = envName === TOP_LEVEL_ENV_NAME; + const field = isTopLevel ? 
bindingPath : `env.${envName}.${bindingPath}`;
+	const bindingError = `"${field}" bindings should have a string "${fieldName}" field but got ${JSON.stringify(binding)}.`;
+
+	const configFile = configFileName(configPath);
+
+	if (isTopLevel) {
+		throw new UserError(
+			`Processing ${configFile} configuration:\n - ${bindingError}`
+		);
+	}
+
+	throw new UserError(
+		`Processing ${configFile} configuration:\n - "env.${envName}" environment configuration\n - ${bindingError}`
+	);
+};
+
+/**
+ * Generates the environment header string used in the generated types file.
+ *
+ * @param hash - The hash representing the environment configuration.
+ * @param command - The wrangler command used to generate the types.
+ *
+ * @returns A string containing the comment outlining the generated environment types.
+ */
+export const getEnvHeader = (hash: string, command?: string): string => {
+	const wranglerCommand =
+		command ?? ["wrangler", ...process.argv.slice(2)].join(" ");
+	return `${ENV_HEADER_COMMENT_PREFIX} \`${wranglerCommand}\` (hash: ${hash})`;
+};
+
 /**
  * Generates the runtime header string used in the generated types file.
  * This header is used to detect when runtime types need to be regenerated.
@@ -279,3 +353,90 @@ export const checkTypesDiff = async (config: Config, entry: Entry) => {
 
 	return true;
 };
+
+/**
+ * Converts a string to PascalCase.
+ *
+ * @param str - The input string to convert.
+ *
+ * @returns The PascalCase version of the input string.
+ *
+ * @example
+ * ```ts
+ * toPascalCase("staging") // "Staging"
+ * toPascalCase("my-prod-env") // "MyProdEnv"
+ * toPascalCase("my_test_env") // "MyTestEnv"
+ * ```
+ */
+export const toPascalCase = (str: string): string =>
+	str
+		.split(/[-_]/)
+		.map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
+		.join("");
+
+/**
+ * Converts an environment name to an interface name.
+ *
+ * Handles the `Env` suffix deduplication.
+ *
+ * @example
+ * ```ts
+ * toEnvInterfaceName("staging") // "StagingEnv"
+ * toEnvInterfaceName("my-prod-env") // "MyProdEnv"
+ * toEnvInterfaceName("prod-env") // "ProdEnv"
+ * toEnvInterfaceName("testing") // "TestingEnv"
+ * ```
+ */
+export const toEnvInterfaceName = (envName: string): string => {
+	const pascalCase = toPascalCase(envName);
+
+	if (pascalCase.endsWith("Env")) {
+		return pascalCase;
+	}
+
+	return `${pascalCase}Env`;
+};
+
+/**
+ * Reserved interface names that cannot be used as environment names.
+ * Note: Only interface names that can actually be generated by toEnvInterfaceName
+ * need to be reserved (i.e., names ending in "Env").
+ */
+const RESERVED_INTERFACE_NAMES = new Set(["Env"]);
+
+/**
+ * Validates that environment interface names don't conflict with reserved names
+ * or with each other after PascalCase conversion.
+ *
+ * @param envNames - Array of environment names from the config
+ *
+ * @throws {UserError} If a reserved name is used or if there are naming collisions
+ */
+export const validateEnvInterfaceNames = (envNames: Array<string>): void => {
+	const interfaceNames = new Map<string, string>();
+
+	for (const reserved of RESERVED_INTERFACE_NAMES) {
+		interfaceNames.set(reserved, `(reserved)`);
+	}
+
+	for (const envName of envNames) {
+		const interfaceName = toEnvInterfaceName(envName);
+
+		const existingEnv = interfaceNames.get(interfaceName);
+		if (existingEnv) {
+			if (existingEnv === "(reserved)") {
+				throw new UserError(
+					`Environment name "${envName}" converts to reserved interface name "${interfaceName}". 
` +
+					`Please rename this environment to avoid conflicts.`
+				);
+			}
+
+			throw new UserError(
+				`Environment names "${existingEnv}" and "${envName}" both convert to interface name "${interfaceName}". ` +
+					`Please rename one of these environments to avoid conflicts.`
+			);
+		}
+
+		interfaceNames.set(interfaceName, envName);
+	}
+};
diff --git a/packages/wrangler/src/type-generation/index.ts b/packages/wrangler/src/type-generation/index.ts
index 6e1be8ee2343..3d3f4692f199 100644
--- a/packages/wrangler/src/type-generation/index.ts
+++ b/packages/wrangler/src/type-generation/index.ts
@@ -19,7 +19,15 @@ import { getDurableObjectClassNameToUseSQLiteMap } from "../dev/class-names-sqli
 import { getVarsForDev } from "../dev/dev-vars";
 import { logger } from "../logger";
 import { isProcessEnvPopulated } from "../process-env";
-import { checkTypesUpToDate, DEFAULT_WORKERS_TYPES_FILE_NAME } from "./helpers";
+import {
+	checkTypesUpToDate,
+	DEFAULT_WORKERS_TYPES_FILE_NAME,
+	getEnvHeader,
+	throwMissingBindingError,
+	toEnvInterfaceName,
+	TOP_LEVEL_ENV_NAME,
+	validateEnvInterfaceNames,
+} from "./helpers";
 import { generateRuntimeTypes } from "./runtime";
 import { logRuntimeTypesMessage } from "./runtime/log-runtime-types-message";
 import type { Entry } from "../deployment-bundle/entry";
@@ -115,7 +123,7 @@ export const typesCommand = createCommand({
 			);
 		}
 
-		checkPath(args.path);
+		validateTypesFile(args.path);
 
 		if (!args.includeEnv && !args.includeRuntime) {
 			throw new CommandLineArgsError(
@@ -312,12 +320,23 @@ export function generateImportSpecifier(from: string, to: string) {
 	}
 }
 
-type Secrets = Record<string, string>;
-
-type ConfigToDTS = Partial<Omit<Config, "vars">> & { vars: VarTypes } & {
-	secrets: Secrets;
-};
-
+/**
+ * Generates TypeScript environment type definitions from a Wrangler configuration.
+ *
+ * This function collects all bindings (KV, R2, D1, Durable Objects, Services, etc.),
+ * variables, and secrets from the config and produces TypeScript type declarations
+ * for the `Env` interface used by Cloudflare Workers.
+ *
+ * @param config - The parsed Wrangler configuration object
+ * @param args - CLI arguments passed to the `types` command
+ * @param envInterface - The name of the generated environment interface (default: "Env")
+ * @param outputPath - The file path where the generated types will be written
+ * @param entrypoint - Optional entry point information for the Worker
+ * @param serviceEntries - Optional map of service names to their entry points for cross-worker type generation
+ * @param log - Whether to log output to the console (default: true)
+ *
+ * @returns An object containing the generated header comment and type definitions, or undefined values if no types were generated
+ */
 export async function generateEnvTypes(
 	config: Config,
 	args: Partial<(typeof typesCommand)["args"]>,
@@ -327,7 +346,6 @@ export async function generateEnvTypes(
 	serviceEntries?: Map<string, Entry>,
 	log = true
 ): Promise<{ envHeader?: string; envTypes?: string }> {
-	const stringKeys: string[] = [];
 	const secrets = getVarsForDev(
 		config.userConfigPath,
 		args.envFile,
@@ -339,46 +357,12 @@ export async function generateEnvTypes(
 		true
 	) as Record<string, string>;
 
-	const configToDTS: ConfigToDTS = {
-		kv_namespaces: config.kv_namespaces ?? 
[], - vars: collectAllVars({ ...args, config: config.configPath }), - wasm_modules: config.wasm_modules, - text_blobs: { - ...config.text_blobs, - }, - data_blobs: config.data_blobs, - durable_objects: config.durable_objects, - r2_buckets: config.r2_buckets, - d1_databases: config.d1_databases, - services: config.services, - analytics_engine_datasets: config.analytics_engine_datasets, - dispatch_namespaces: config.dispatch_namespaces, - logfwdr: config.logfwdr, - unsafe: config.unsafe, - rules: config.rules, - queues: config.queues, - send_email: config.send_email, - vectorize: config.vectorize, - hyperdrive: config.hyperdrive, - mtls_certificates: config.mtls_certificates, - browser: config.browser, - images: config.images, - ai: config.ai, - version_metadata: config.version_metadata, - secrets, - assets: config.assets, - workflows: config.workflows, - pipelines: config.pipelines, - secrets_store_secrets: config.secrets_store_secrets, - unsafe_hello_world: config.unsafe_hello_world, - ratelimits: config.ratelimits, - worker_loaders: config.worker_loaders, - vpc_services: config.vpc_services, - media: config.media, - }; + const collectionArgs = { + ...args, + config: config.configPath, + } satisfies Partial<(typeof typesCommand)["args"]>; const entrypointFormat = entrypoint?.format ?? "modules"; - const fullOutputPath = resolve(outputPath); // Note: we infer whether the user has provided an envInterface by checking // if it is different from the default `Env` value, this works well @@ -394,378 +378,802 @@ export async function generateEnvTypes( ); } - const envTypeStructure: [string, string][] = []; + const { rawConfig } = experimental_readRawConfig(collectionArgs); + const hasEnvironments = + !!rawConfig.env && Object.keys(rawConfig.env).length > 0; - if (configToDTS.kv_namespaces) { - for (const kvNamespace of configToDTS.kv_namespaces) { - envTypeStructure.push([ - constructTypeKey(kvNamespace.binding), - "KVNamespace", - ]); - } + const shouldGeneratePerEnvTypes = hasEnvironments && !args.env; + if (shouldGeneratePerEnvTypes) { + return generatePerEnvironmentTypes( + config, + collectionArgs, + envInterface, + outputPath, + entrypoint, + serviceEntries, + secrets, + log + ); + } + + return generateSimpleEnvTypes( + config, + collectionArgs, + envInterface, + outputPath, + entrypoint, + serviceEntries, + secrets, + log + ); +} + +/** + * Generates simple `Env` types. + * + * Used when no named environments exist or when `--env` is specified. 
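+ *
+ * As an illustrative sketch of the output (mirroring the "should generate simple
+ * Env when no environments are defined" test above), a config declaring a `MY_KV`
+ * KV namespace and a `MY_VAR` var yields:
+ *
+ * ```ts
+ * declare namespace Cloudflare {
+ * 	interface Env {
+ * 		MY_KV: KVNamespace;
+ * 		MY_VAR: "value";
+ * 	}
+ * }
+ * interface Env extends Cloudflare.Env {}
+ * ```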
+ *
+ * @param config - The parsed Wrangler configuration object
+ * @param collectionArgs - CLI arguments for collecting bindings
+ * @param envInterface - The name of the generated environment interface
+ * @param outputPath - The file path where the generated types will be written
+ * @param entrypoint - Optional entry point information for the Worker
+ * @param serviceEntries - Optional map of service names to their entry points for cross-worker type generation
+ * @param secrets - Record of secret variable names to their values
+ * @param log - Whether to log output to the console (default: true)
+ *
+ * @returns An object containing the generated header comment and type definitions, or undefined values if no types were generated
+ */
+async function generateSimpleEnvTypes(
+	config: Config,
+	collectionArgs: Partial<(typeof typesCommand)["args"]>,
+	envInterface: string,
+	outputPath: string,
+	entrypoint?: Entry,
+	serviceEntries?: Map<string, Entry>,
+	secrets: Record<string, string> = {},
+	log = true
+): Promise<{ envHeader?: string; envTypes?: string }> {
+	const stringKeys = new Array<string>();
+
+	const collectedBindings = collectCoreBindings(collectionArgs);
+	const collectedDurableObjects = collectAllDurableObjects(collectionArgs);
+	const collectedServices = collectAllServices(collectionArgs);
+	const collectedUnsafeBindings = collectAllUnsafeBindings(collectionArgs);
+	const collectedVars = collectAllVars(collectionArgs);
+	const collectedWorkflows = collectAllWorkflows(collectionArgs);
+
+	const entrypointFormat = entrypoint?.format ?? "modules";
+	const fullOutputPath = resolve(outputPath);
+
+	const envTypeStructure = new Array<{
+		key: string;
+		type: string;
+	}>();
+
+	for (const binding of collectedBindings) {
+		envTypeStructure.push({
+			key: constructTypeKey(binding.name),
+			type: binding.type,
+		});
 	}
 
-	if (configToDTS.vars) {
+	if (collectedVars) {
 		// Note: vars get overridden by secrets, so should their types
-		const vars = Object.entries(configToDTS.vars).filter(
-			([key]) => !(key in configToDTS.secrets)
+		const vars = Object.entries(collectedVars).filter(
+			([key]) => !(key in secrets)
 		);
 		for (const [varName, varValues] of vars) {
-			envTypeStructure.push([
-				constructTypeKey(varName),
-				varValues.length === 1 ? varValues[0] : varValues.join(" | "),
-			]);
+			envTypeStructure.push({
+				key: constructTypeKey(varName),
+				type: varValues.length === 1 ? varValues[0] : varValues.join(" | "),
+			});
 			stringKeys.push(varName);
 		}
 	}
 
-	for (const secretName in configToDTS.secrets) {
-		envTypeStructure.push([constructTypeKey(secretName), "string"]);
+	for (const secretName in secrets) {
+		envTypeStructure.push({
+			key: constructTypeKey(secretName),
+			type: "string",
+		});
 		stringKeys.push(secretName);
 	}
 
-	if (configToDTS.durable_objects?.bindings) {
-		for (const durableObject of configToDTS.durable_objects.bindings) {
-			const doEntrypoint = durableObject.script_name
-				? serviceEntries?.get(durableObject.script_name)
+	for (const durableObject of collectedDurableObjects) {
+		const doEntrypoint = durableObject.script_name
+			? serviceEntries?.get(durableObject.script_name)
 			: entrypoint;
+
+		const importPath = doEntrypoint
+			? 
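+			/* Illustrative note (hypothetical paths): for an output file of
+			   `worker-configuration.d.ts` next to `src/index.ts`, the specifier
+			   below resolves to `./src/index`, so an exported class can then be
+			   referenced as
+			   `DurableObjectNamespace<import("./src/index").MyDurableObject>`. */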
generateImportSpecifier(fullOutputPath, doEntrypoint.file) + : undefined; + + const exportExists = doEntrypoint?.exports?.some( + (e) => e === durableObject.class_name + ); + + const key = constructTypeKey(durableObject.name); + + if (importPath && exportExists) { + envTypeStructure.push({ + key: key, + type: `DurableObjectNamespace`, + }); + continue; + } + + if (durableObject.script_name) { + envTypeStructure.push({ + key: key, + type: `DurableObjectNamespace /* ${durableObject.class_name} from ${durableObject.script_name} */`, + }); + continue; + } + + envTypeStructure.push({ + key: key, + type: `DurableObjectNamespace /* ${durableObject.class_name} */`, + }); + } + + for (const service of collectedServices) { + const serviceEntry = + service.service !== entrypoint?.name + ? serviceEntries?.get(service.service) : entrypoint; - const importPath = doEntrypoint - ? generateImportSpecifier(fullOutputPath, doEntrypoint.file) - : undefined; + const importPath = serviceEntry + ? generateImportSpecifier(fullOutputPath, serviceEntry.file) + : undefined; - const exportExists = doEntrypoint?.exports?.some( - (e) => e === durableObject.class_name - ); + const exportExists = serviceEntry?.exports?.some( + (e) => e === (service.entrypoint ?? "default") + ); - let typeName: string; + const key = constructTypeKey(service.binding); - if (importPath && exportExists) { - typeName = `DurableObjectNamespace`; - } else if (durableObject.script_name) { - typeName = `DurableObjectNamespace /* ${durableObject.class_name} from ${durableObject.script_name} */`; - } else { - typeName = `DurableObjectNamespace /* ${durableObject.class_name} */`; - } + if (importPath && exportExists) { + envTypeStructure.push({ + key: key, + type: `Service`, + }); + continue; + } - envTypeStructure.push([constructTypeKey(durableObject.name), typeName]); + if (service.entrypoint) { + envTypeStructure.push({ + key: key, + type: `Service /* entrypoint ${service.entrypoint} from ${service.service} */`, + }); + continue; } + + envTypeStructure.push({ + key, + type: `Fetcher /* ${service.service} */`, + }); } - if (configToDTS.r2_buckets) { - for (const R2Bucket of configToDTS.r2_buckets) { - envTypeStructure.push([constructTypeKey(R2Bucket.binding), "R2Bucket"]); + for (const workflow of collectedWorkflows) { + const workflowEntrypoint = workflow.script_name + ? serviceEntries?.get(workflow.script_name) + : entrypoint; + + const importPath = workflowEntrypoint + ? 
generateImportSpecifier(fullOutputPath, workflowEntrypoint.file) + : undefined; + + const exportExists = workflowEntrypoint?.exports?.some( + (e) => e === workflow.class_name + ); + + const key = constructTypeKey(workflow.binding); + + if (importPath && exportExists) { + envTypeStructure.push({ + key: key, + type: `Workflow[0]['payload']>`, + }); + continue; + } + + if (workflow.script_name) { + envTypeStructure.push({ + key: key, + type: `Workflow /* ${workflow.class_name} from ${workflow.script_name} */`, + }); + continue; } + + envTypeStructure.push({ + key, + type: `Workflow /* ${workflow.class_name} */`, + }); } - if (configToDTS.d1_databases) { - for (const d1 of configToDTS.d1_databases) { - envTypeStructure.push([constructTypeKey(d1.binding), "D1Database"]); + for (const unsafe of collectedUnsafeBindings) { + if (unsafe.type === "ratelimit") { + envTypeStructure.push({ + key: constructTypeKey(unsafe.name), + type: "RateLimit", + }); + continue; } + + envTypeStructure.push({ + key: constructTypeKey(unsafe.name), + type: "any", + }); } - if (configToDTS.secrets_store_secrets) { - for (const secretsStoreSecret of configToDTS.secrets_store_secrets) { - envTypeStructure.push([ - constructTypeKey(secretsStoreSecret.binding), - "SecretsStoreSecret", - ]); + // Data blobs are not environment-specific + if (config.data_blobs) { + for (const dataBlobs in config.data_blobs) { + envTypeStructure.push({ + key: constructTypeKey(dataBlobs), + type: "ArrayBuffer", + }); } } - if (configToDTS.unsafe_hello_world) { - for (const helloWorld of configToDTS.unsafe_hello_world) { - envTypeStructure.push([ - constructTypeKey(helloWorld.binding), - "HelloWorldBinding", - ]); + // Text blobs are not environment-specific + if (config.text_blobs) { + for (const textBlobs in config.text_blobs) { + envTypeStructure.push({ + key: constructTypeKey(textBlobs), + type: "string", + }); } } - if (configToDTS.ratelimits) { - for (const ratelimit of configToDTS.ratelimits) { - envTypeStructure.push([constructTypeKey(ratelimit.name), "RateLimit"]); + const modulesTypeStructure = new Array(); + if (config.rules) { + const moduleTypeMap = { + CompiledWasm: "WebAssembly.Module", + Data: "ArrayBuffer", + Text: "string", + }; + for (const ruleObject of config.rules) { + const typeScriptType = + moduleTypeMap[ruleObject.type as keyof typeof moduleTypeMap]; + if (typeScriptType === undefined) { + continue; + } + + for (const glob of ruleObject.globs) { + modulesTypeStructure.push(`declare module "${constructTSModuleGlob(glob)}" { +\tconst value: ${typeScriptType}; +\texport default value; +}`); + } } } - if (configToDTS.worker_loaders) { - for (const workerLoader of configToDTS.worker_loaders) { - envTypeStructure.push([ - constructTypeKey(workerLoader.binding), - "WorkerLoader", - ]); + const typesHaveBeenFound = + envTypeStructure.length > 0 || modulesTypeStructure.length > 0; + if (entrypointFormat === "modules" || typesHaveBeenFound) { + const { consoleOutput, fileContent } = generateTypeStrings( + entrypointFormat, + envInterface, + envTypeStructure.map(({ key, type }) => `${key}: ${type};`), + modulesTypeStructure, + stringKeys, + config.compatibility_date, + config.compatibility_flags, + entrypoint + ? 
generateImportSpecifier(fullOutputPath, entrypoint.file)
+				: undefined,
+			[...getDurableObjectClassNameToUseSQLiteMap(config.migrations).keys()]
+		);
+
+		const hash = createHash("sha256")
+			.update(consoleOutput)
+			.digest("hex")
+			.slice(0, 32);
+
+		if (log) {
+			logger.log(chalk.dim(consoleOutput));
+		}
+
+		return {
+			envHeader: getEnvHeader(hash),
+			envTypes: fileContent,
+		};
+	} else {
+		if (log) {
+			logger.log(chalk.dim("No project types to add.\n"));
 		}
+
+		return {
+			envHeader: undefined,
+			envTypes: undefined,
+		};
 	}
+}
 
-	if (configToDTS.services) {
-		for (const service of configToDTS.services) {
-			const serviceEntry =
-				service.service !== entrypoint?.name
-					? serviceEntries?.get(service.service)
-					: entrypoint;
+/**
+ * Generates per-environment interface types plus an aggregated `Env` interface.
+ *
+ * Used when named environments exist and no `--env` flag is specified.
+ *
+ * @param config - The parsed Wrangler configuration object
+ * @param collectionArgs - CLI arguments for collecting bindings
+ * @param envInterface - The name of the generated environment interface
+ * @param outputPath - The file path where the generated types will be written
+ * @param entrypoint - Optional entry point information for the Worker
+ * @param serviceEntries - Optional map of service names to their entry points for cross-worker type generation
+ * @param secrets - Record of secret variable names to their values
+ * @param log - Whether to log output to the console (default: true)
+ *
+ * @returns An object containing the generated header comment and type definitions, or undefined values if no types were generated
+ */
+async function generatePerEnvironmentTypes(
+	config: Config,
+	collectionArgs: Partial<(typeof typesCommand)["args"]>,
+	envInterface: string,
+	outputPath: string,
+	entrypoint?: Entry,
+	serviceEntries?: Map<string, Entry>,
+	secrets: Record<string, string> = {},
+	log = true
+): Promise<{ envHeader?: string; envTypes?: string }> {
+	const { rawConfig } = experimental_readRawConfig(collectionArgs);
+	const envNames = Object.keys(rawConfig.env ?? {});
 
-			const importPath = serviceEntry
-				? generateImportSpecifier(fullOutputPath, serviceEntry.file)
-				: undefined;
+	validateEnvInterfaceNames(envNames);
 
-			const exportExists = serviceEntry?.exports?.some(
-				(e) => e === (service.entrypoint ?? "default")
-			);
+	const entrypointFormat = entrypoint?.format ?? 
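+	/* Output shape (mirroring the tests above): one interface per named
+	   environment, e.g. `interface StagingEnv { ... }`, plus an aggregated
+	   `interface Env` in which bindings missing from some environment are
+	   marked optional. */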
"modules"; + const fullOutputPath = resolve(outputPath); - let typeName: string; + const bindingsPerEnv = collectCoreBindingsPerEnvironment(collectionArgs); + const varsPerEnv = collectVarsPerEnvironment(collectionArgs); + const durableObjectsPerEnv = + collectDurableObjectsPerEnvironment(collectionArgs); + const servicesPerEnv = collectServicesPerEnvironment(collectionArgs); + const workflowsPerEnv = collectWorkflowsPerEnvironment(collectionArgs); + const unsafePerEnv = collectUnsafeBindingsPerEnvironment(collectionArgs); - if (importPath && exportExists) { - typeName = `Service`; - } else if (service.entrypoint) { - typeName = `Service /* entrypoint ${service.entrypoint} from ${service.service} */`; - } else { - typeName = `Fetcher /* ${service.service} */`; - } + // Track all binding names and their types across all environments for aggregation + const aggregatedBindings = new Map< + string, // Binding name + Set // Set of types + >(); - envTypeStructure.push([constructTypeKey(service.binding), typeName]); + // Track which environments each binding appears in + const bindingPresence = new Map>(); + + const allEnvNames = [TOP_LEVEL_ENV_NAME, ...envNames]; + + function trackBinding(name: string, type: string, envName: string): void { + let types = aggregatedBindings.get(name); + let presence = bindingPresence.get(name); + + if (!types) { + types = new Set(); + aggregatedBindings.set(name, types); } - } - if (configToDTS.analytics_engine_datasets) { - for (const analyticsEngine of configToDTS.analytics_engine_datasets) { - envTypeStructure.push([ - constructTypeKey(analyticsEngine.binding), - "AnalyticsEngineDataset", - ]); + if (!presence) { + presence = new Set(); + bindingPresence.set(name, presence); } + + types.add(type); + presence.add(envName); } - if (configToDTS.dispatch_namespaces) { - for (const namespace of configToDTS.dispatch_namespaces) { - envTypeStructure.push([ - constructTypeKey(namespace.binding), - "DispatchNamespace", - ]); + function getDurableObjectType(durableObject: { + name: string; + class_name: string; + script_name?: string; + }): string { + const doEntrypoint = durableObject.script_name + ? serviceEntries?.get(durableObject.script_name) + : entrypoint; + + const importPath = doEntrypoint + ? generateImportSpecifier(fullOutputPath, doEntrypoint.file) + : undefined; + + const exportExists = doEntrypoint?.exports?.some( + (e) => e === durableObject.class_name + ); + + if (importPath && exportExists) { + return `DurableObjectNamespace`; + } + + if (durableObject.script_name) { + return `DurableObjectNamespace /* ${durableObject.class_name} from ${durableObject.script_name} */`; } - } - if (configToDTS.logfwdr?.bindings?.length) { - envTypeStructure.push([constructTypeKey("LOGFWDR_SCHEMA"), "any"]); + return `DurableObjectNamespace /* ${durableObject.class_name} */`; } - if (configToDTS.data_blobs) { - for (const dataBlobs in configToDTS.data_blobs) { - envTypeStructure.push([constructTypeKey(dataBlobs), "ArrayBuffer"]); + function getServiceType(service: { + binding: string; + service: string; + entrypoint?: string; + }): string { + const serviceEntry = + service.service !== entrypoint?.name + ? serviceEntries?.get(service.service) + : entrypoint; + + const importPath = serviceEntry + ? generateImportSpecifier(fullOutputPath, serviceEntry.file) + : undefined; + + const exportExists = serviceEntry?.exports?.some( + (e) => e === (service.entrypoint ?? 
"default") + ); + + if (importPath && exportExists) { + return `Service`; + } + + if (service.entrypoint) { + return `Service /* entrypoint ${service.entrypoint} from ${service.service} */`; } + + return `Fetcher /* ${service.service} */`; } - if (configToDTS.text_blobs) { - for (const textBlobs in configToDTS.text_blobs) { - envTypeStructure.push([constructTypeKey(textBlobs), "string"]); + function getWorkflowType(workflow: { + binding: string; + name: string; + class_name: string; + script_name?: string; + }): string { + const workflowEntrypoint = workflow.script_name + ? serviceEntries?.get(workflow.script_name) + : entrypoint; + + const importPath = workflowEntrypoint + ? generateImportSpecifier(fullOutputPath, workflowEntrypoint.file) + : undefined; + + const exportExists = workflowEntrypoint?.exports?.some( + (e) => e === workflow.class_name + ); + + if (importPath && exportExists) { + return `Workflow[0]['payload']>`; + } + + if (workflow.script_name) { + return `Workflow /* ${workflow.class_name} from ${workflow.script_name} */`; } + + return `Workflow /* ${workflow.class_name} */`; } - if (configToDTS.unsafe?.bindings) { - for (const unsafe of configToDTS.unsafe.bindings) { - if (unsafe.type === "ratelimit") { - envTypeStructure.push([constructTypeKey(unsafe.name), "RateLimit"]); - } else { - envTypeStructure.push([constructTypeKey(unsafe.name), "any"]); + const perEnvInterfaces = new Array(); + const stringKeys = new Array(); + + for (const envName of envNames) { + const interfaceName = toEnvInterfaceName(envName); + const envBindings = new Array<{ key: string; value: string }>(); + + const bindings = bindingsPerEnv.get(envName) ?? []; + for (const binding of bindings) { + envBindings.push({ + key: constructTypeKey(binding.name), + value: binding.type, + }); + trackBinding(binding.name, binding.type, envName); + } + + const vars = varsPerEnv.get(envName) ?? {}; + for (const [varName, varValues] of Object.entries(vars)) { + if (varName in secrets) { + continue; + } + + const varType = + varValues.length === 1 ? varValues[0] : varValues.join(" | "); + envBindings.push({ key: constructTypeKey(varName), value: varType }); + trackBinding(varName, varType, envName); + if (!stringKeys.includes(varName)) { + stringKeys.push(varName); } } - } - if (configToDTS.queues) { - if (configToDTS.queues.producers) { - for (const queue of configToDTS.queues.producers) { - envTypeStructure.push([constructTypeKey(queue.binding), "Queue"]); + for (const secretName in secrets) { + envBindings.push({ key: constructTypeKey(secretName), value: "string" }); + if (!stringKeys.includes(secretName)) { + stringKeys.push(secretName); } } - } - if (configToDTS.send_email) { - for (const sendEmail of configToDTS.send_email) { - envTypeStructure.push([constructTypeKey(sendEmail.name), "SendEmail"]); + const durableObjects = durableObjectsPerEnv.get(envName) ?? []; + for (const durableObject of durableObjects) { + const type = getDurableObjectType(durableObject); + envBindings.push({ + key: constructTypeKey(durableObject.name), + value: type, + }); + trackBinding(durableObject.name, type, envName); } - } - if (configToDTS.vectorize) { - for (const vectorize of configToDTS.vectorize) { - envTypeStructure.push([ - constructTypeKey(vectorize.binding), - "VectorizeIndex", - ]); + const services = servicesPerEnv.get(envName) ?? 
[]; + for (const service of services) { + const type = getServiceType(service); + envBindings.push({ key: constructTypeKey(service.binding), value: type }); + trackBinding(service.binding, type, envName); } - } - if (configToDTS.hyperdrive) { - for (const hyperdrive of configToDTS.hyperdrive) { - envTypeStructure.push([ - constructTypeKey(hyperdrive.binding), - "Hyperdrive", - ]); + const workflows = workflowsPerEnv.get(envName) ?? []; + for (const workflow of workflows) { + const type = getWorkflowType(workflow); + envBindings.push({ + key: constructTypeKey(workflow.binding), + value: type, + }); + trackBinding(workflow.binding, type, envName); } - } - if (configToDTS.vpc_services) { - for (const vpcService of configToDTS.vpc_services) { - envTypeStructure.push([constructTypeKey(vpcService.binding), "Fetcher"]); + const unsafeBindings = unsafePerEnv.get(envName) ?? []; + for (const unsafe of unsafeBindings) { + const type = unsafe.type === "ratelimit" ? "RateLimit" : "any"; + envBindings.push({ key: constructTypeKey(unsafe.name), value: type }); + trackBinding(unsafe.name, type, envName); } - } - if (configToDTS.mtls_certificates) { - for (const mtlsCertificate of configToDTS.mtls_certificates) { - envTypeStructure.push([ - constructTypeKey(mtlsCertificate.binding), - "Fetcher", - ]); + if (envBindings.length > 0) { + const bindingLines = envBindings + .map(({ key, value }) => `\t\t${key}: ${value};`) + .join("\n"); + perEnvInterfaces.push( + `\tinterface ${interfaceName} {\n${bindingLines}\n\t}` + ); + } else { + perEnvInterfaces.push(`\tinterface ${interfaceName} {}`); } } - if (configToDTS.browser) { - // The BrowserWorker type in @cloudflare/puppeteer is of type - // { fetch: typeof fetch }, but workers-types doesn't include it - // and Fetcher is valid for the purposes of handing it to puppeteer - envTypeStructure.push([ - constructTypeKey(configToDTS.browser.binding), - "Fetcher", - ]); + const topLevelBindings = bindingsPerEnv.get(TOP_LEVEL_ENV_NAME) ?? []; + for (const binding of topLevelBindings) { + trackBinding(binding.name, binding.type, TOP_LEVEL_ENV_NAME); } - if (configToDTS.ai) { - envTypeStructure.push([constructTypeKey(configToDTS.ai.binding), "Ai"]); + const topLevelVars = varsPerEnv.get(TOP_LEVEL_ENV_NAME) ?? {}; + for (const [varName, varValues] of Object.entries(topLevelVars)) { + if (varName in secrets) { + continue; + } + + const varType = + varValues.length === 1 ? varValues[0] : varValues.join(" | "); + trackBinding(varName, varType, TOP_LEVEL_ENV_NAME); + if (!stringKeys.includes(varName)) { + stringKeys.push(varName); + } } - if (configToDTS.images) { - envTypeStructure.push([ - constructTypeKey(configToDTS.images.binding), - "ImagesBinding", - ]); + const topLevelDOs = durableObjectsPerEnv.get(TOP_LEVEL_ENV_NAME) ?? []; + for (const durableObject of topLevelDOs) { + const type = getDurableObjectType(durableObject); + trackBinding(durableObject.name, type, TOP_LEVEL_ENV_NAME); } - if (configToDTS.media) { - envTypeStructure.push([ - constructTypeKey(configToDTS.media.binding), - "MediaBinding", - ]); + const topLevelServices = servicesPerEnv.get(TOP_LEVEL_ENV_NAME) ?? []; + for (const service of topLevelServices) { + const type = getServiceType(service); + trackBinding(service.binding, type, TOP_LEVEL_ENV_NAME); } - if (configToDTS.version_metadata) { - envTypeStructure.push([ - configToDTS.version_metadata.binding, - "WorkerVersionMetadata", - ]); + const topLevelWorkflows = workflowsPerEnv.get(TOP_LEVEL_ENV_NAME) ?? 
[]; + for (const workflow of topLevelWorkflows) { + const type = getWorkflowType(workflow); + trackBinding(workflow.binding, type, TOP_LEVEL_ENV_NAME); } - if (configToDTS.assets?.binding) { - envTypeStructure.push([ - constructTypeKey(configToDTS.assets.binding), - "Fetcher", - ]); + const topLevelUnsafe = unsafePerEnv.get(TOP_LEVEL_ENV_NAME) ?? []; + for (const unsafe of topLevelUnsafe) { + const type = unsafe.type === "ratelimit" ? "RateLimit" : "any"; + trackBinding(unsafe.name, type, TOP_LEVEL_ENV_NAME); } - if (configToDTS.workflows) { - for (const workflow of configToDTS.workflows) { - const doEntrypoint = workflow.script_name - ? serviceEntries?.get(workflow.script_name) - : entrypoint; + const aggregatedEnvBindings = new Array<{ + key: string; + required: boolean; + type: string; + }>(); - const importPath = doEntrypoint - ? generateImportSpecifier(fullOutputPath, doEntrypoint.file) - : undefined; + for (const secretName in secrets) { + aggregatedEnvBindings.push({ + key: constructTypeKey(secretName), + required: true, + type: "string", + }); + } - const exportExists = doEntrypoint?.exports?.some( - (e) => e === workflow.class_name - ); + for (const [name, types] of aggregatedBindings.entries()) { + if (name in secrets) { + continue; + } - let typeName: string; + const typeArray = Array.from(types); + const unionType = + typeArray.length === 1 ? typeArray[0] : typeArray.join(" | "); + const presence = bindingPresence.get(name); - if (importPath && exportExists) { - typeName = `Workflow[0]['payload']>`; - } else if (workflow.script_name) { - typeName = `Workflow /* ${workflow.class_name} from ${workflow.script_name} */`; - } else { - typeName = `Workflow /* ${workflow.class_name} */`; - } + // Required if present in all environments (top-level + all named envs) + const isRequired = presence + ? 
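+		/* Worked example (hypothetical values): with envNames = ["staging", "production"],
+		   allEnvNames is ["$top-level", "staging", "production"]. A var declared in all
+		   three unions its types and stays required (`DEBUG: "true" | "false";`), while
+		   a binding declared only in staging becomes optional in the aggregated Env
+		   (`KV_STAGING?: KVNamespace;`). */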
allEnvNames.every((env) => presence.has(env)) + : false; + + aggregatedEnvBindings.push({ + key: constructTypeKey(name), + required: isRequired, + type: unionType, + }); + } - envTypeStructure.push([constructTypeKey(workflow.binding), typeName]); + // Data blobs are not environment-specific, add to aggregated `Env` + if (config.data_blobs) { + for (const dataBlobs in config.data_blobs) { + aggregatedEnvBindings.push({ + key: constructTypeKey(dataBlobs), + required: true, + type: "ArrayBuffer", + }); } } - if (configToDTS.pipelines) { - for (const pipeline of configToDTS.pipelines) { - envTypeStructure.push([ - constructTypeKey(pipeline.binding), - `import("cloudflare:pipelines").Pipeline`, - ]); + // Text blobs are not environment-specific, add to aggregated `Env` + if (config.text_blobs) { + for (const textBlobs in config.text_blobs) { + aggregatedEnvBindings.push({ + key: constructTypeKey(textBlobs), + required: true, + type: "string", + }); } } - const modulesTypeStructure: string[] = []; - if (configToDTS.rules) { + const modulesTypeStructure = new Array(); + if (config.rules) { const moduleTypeMap = { - Text: "string", - Data: "ArrayBuffer", CompiledWasm: "WebAssembly.Module", + Data: "ArrayBuffer", + Text: "string", }; - for (const ruleObject of configToDTS.rules) { + for (const ruleObject of config.rules) { const typeScriptType = moduleTypeMap[ruleObject.type as keyof typeof moduleTypeMap]; if (typeScriptType !== undefined) { - ruleObject.globs.forEach((glob) => { + for (const glob of ruleObject.globs) { modulesTypeStructure.push(`declare module "${constructTSModuleGlob(glob)}" { const value: ${typeScriptType}; export default value; -}`); - }); + }`); + } } } } - const wranglerCommandUsed = ["wrangler", ...process.argv.slice(2)].join(" "); - - const typesHaveBeenFound = - envTypeStructure.length || modulesTypeStructure.length; - if (entrypointFormat === "modules" || typesHaveBeenFound) { - const { fileContent, consoleOutput } = generateTypeStrings( - entrypointFormat, - envInterface, - envTypeStructure.map(([key, value]) => `${key}: ${value};`), - modulesTypeStructure, - stringKeys, - config.compatibility_date, - config.compatibility_flags, - entrypoint - ? generateImportSpecifier(fullOutputPath, entrypoint.file) - : undefined, - [...getDurableObjectClassNameToUseSQLiteMap(config.migrations).keys()] - ); - const hash = createHash("sha256") - .update(consoleOutput) - .digest("hex") - .slice(0, 32); - - const envHeader = `// Generated by Wrangler by running \`${wranglerCommandUsed}\` (hash: ${hash})`; + const { consoleOutput, fileContent } = generatePerEnvTypeStrings( + entrypointFormat, + envInterface, + perEnvInterfaces, + aggregatedEnvBindings, + modulesTypeStructure, + stringKeys, + config.compatibility_date, + config.compatibility_flags, + entrypoint + ? 
generateImportSpecifier(fullOutputPath, entrypoint.file)
+			: undefined,
+		[...getDurableObjectClassNameToUseSQLiteMap(config.migrations).keys()]
+	);
+
+	const hash = createHash("sha256")
+		.update(consoleOutput)
+		.digest("hex")
+		.slice(0, 32);
+
+	if (log) {
+		logger.log(chalk.dim(consoleOutput));
+	}
+
+	return {
+		envHeader: getEnvHeader(hash),
+		envTypes: fileContent,
+	};
+}
 
-const checkPath = (path: string) => {
-	const wranglerOverrideDTSPath = findUpSync(path);
-	if (wranglerOverrideDTSPath === undefined) {
-		return;
-	}
-	try {
-		const fileContent = fs.readFileSync(wranglerOverrideDTSPath, "utf8");
-		if (
+/**
+ * Generates type strings for per-environment interfaces plus aggregated Env.
+ *
+ * @param formatType - The worker format type ("modules" or "service-worker")
+ * @param envInterface - The name of the generated environment interface
+ * @param perEnvInterfaces - Array of per-environment interface strings
+ * @param aggregatedEnvBindings - Array of aggregated environment bindings as { key, type, required } objects
+ * @param modulesTypeStructure - Array of module type declaration strings
+ * @param stringKeys - Array of variable names that should be typed as strings in process.env
+ * @param compatibilityDate - Compatibility date for the worker
+ * @param compatibilityFlags - Compatibility flags for the worker
+ * @param entrypointModule - The import specifier for the main entrypoint module
+ * @param configuredDurableObjects - Array of configured Durable Object class names
+ *
+ * @returns An object containing the complete file content and console output strings
+ */
+function generatePerEnvTypeStrings(
+	formatType: string,
+	envInterface: string,
+	perEnvInterfaces: string[],
+	aggregatedEnvBindings: Array<{
+		key: string;
+		required: boolean;
+		type: string;
+	}>,
+	modulesTypeStructure: string[],
+	stringKeys: string[],
+	compatibilityDate: string | undefined,
+	compatibilityFlags: string[] | undefined,
+	entrypointModule: string | undefined,
+	configuredDurableObjects: string[]
+): { fileContent: string; consoleOutput: string } {
+	let baseContent = "";
+	let processEnv = "";
+
+	if (formatType === "modules") {
+		if (
+			isProcessEnvPopulated(compatibilityDate, compatibilityFlags) &&
+			stringKeys.length > 0
+		) {
+			processEnv = `\ntype StringifyValues<EnvType extends Record<string, unknown>> = {\n\t[Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;\n};\ndeclare namespace NodeJS {\n\tinterface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, ${stringKeys.map((k) => `"${k}"`).join(" | ")}>> {}\n}`;
+		}
+
+		const perEnvContent = perEnvInterfaces.join("\n");
+
+		const envBindingLines = aggregatedEnvBindings
+			.map((b) => `\t\t${b.key}${b.required ? "" : "?"}: ${b.type};`)
+			.join("\n");
+
+		const globalPropsContent = entrypointModule
+			? `\n\tinterface GlobalProps {\n\t\tmainModule: typeof import("${entrypointModule}");${configuredDurableObjects.length > 0 ? 
`\n\t\tdurableNamespaces: ${configuredDurableObjects.map((d) => `"${d}"`).join(" | ")};` : ""}\n\t}` + : ""; + + baseContent = `declare namespace Cloudflare {${globalPropsContent}\n${perEnvContent}\n\tinterface Env {\n${envBindingLines}\n\t}\n}\ninterface ${envInterface} extends Cloudflare.Env {}${processEnv}`; + } else { + // Service worker syntax - just output aggregated bindings as globals + const envBindingLines = aggregatedEnvBindings + .map(({ key, type }) => `\tconst ${key}: ${type};`) + .join("\n"); + baseContent = `export {};\ndeclare global {\n${envBindingLines}\n}`; + } + + const modulesContent = modulesTypeStructure.join("\n"); + + return { + consoleOutput: `${baseContent}\n${modulesContent}`, + fileContent: `${baseContent}\n${modulesContent}`, + }; +} + +/** + * Checks if a .d.ts file at the given path exists and was not generated by Wrangler. + * + * @param path - The path to the .d.ts file to check. + * + * @returns void if no conflicting file exists. + * + * @throws {Error} If an unexpected error occurs while reading the file. + * @throws {UserError} If a non-Wrangler .d.ts file already exists at the given path. + */ +const validateTypesFile = (path: string): void => { + const wranglerOverrideDTSPath = findUpSync(path); + if (wranglerOverrideDTSPath === undefined) { + return; + } + + try { + const fileContent = fs.readFileSync(wranglerOverrideDTSPath, "utf8"); + if ( !fileContent.includes("Generated by Wrangler") && !fileContent.includes("Runtime types generated with workerd") ) { @@ -781,6 +1189,21 @@ const checkPath = (path: string) => { } }; +/** + * Generates type strings for a single aggregated Env interface. + * + * @param formatType - The worker format type ("modules" or "service-worker") + * @param envInterface - The name of the generated environment interface + * @param envTypeStructure - Array of environment binding strings + * @param modulesTypeStructure - Array of module type declaration strings + * @param stringKeys - Array of variable names that should be typed as strings in process.env + * @param compatibilityDate - Compatibility date for the worker + * @param compatibilityFlags - Compatibility flags for the worker + * @param entrypointModule - The entrypoint module path + * @param configuredDurableObjects - Array of configured durable object names + * + * @returns An object containing the complete file content and console output strings + */ function generateTypeStrings( formatType: string, envInterface: string, @@ -791,7 +1214,10 @@ function generateTypeStrings( compatibilityFlags: string[] | undefined, entrypointModule: string | undefined, configuredDurableObjects: string[] -): { fileContent: string; consoleOutput: string } { +): { + consoleOutput: string; + fileContent: string; +} { let baseContent = ""; let processEnv = ""; @@ -818,6 +1244,10 @@ function generateTypeStrings( /** * Attempts to read the tsconfig.json at the current path. + * + * @param tsconfigPath - The path to the tsconfig.json file + * + * @returns An array of types defined in the tsconfig.json's compilerOptions.types, or an empty array if not found or on error */ function readTsconfigTypes(tsconfigPath: string): string[] { if (!fs.existsSync(tsconfigPath)) { @@ -840,12 +1270,44 @@ type TSConfig = { }; }; -type VarTypes = Record; +/** + * Retrieves the environment config for a specific environment name, throwing if it doesn't exist. 
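+ *
+ * For example (hypothetical config): with `env: { staging: { ... } }`,
+ * `getEnvConfig("staging", rawConfig)` returns the staging block, while
+ * `getEnvConfig("prod", rawConfig)` throws a `UserError` whose message ends with
+ * "Available environments: staging".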
+ *
+ * @param environmentName - The environment name specified via --env
+ * @param rawConfig - The raw config object
+ *
+ * @returns The environment config object
+ *
+ * @throws {UserError} If the environment doesn't exist in the config
+ */
+function getEnvConfig(
+	environmentName: string,
+	rawConfig: { env?: Record<string, RawEnvironment> }
+): RawEnvironment {
+	const envConfig = rawConfig.env?.[environmentName];
+	if (!envConfig) {
+		const availableEnvs = Object.keys(rawConfig.env ?? {});
+		const envList =
+			availableEnvs.length > 0
+				? `Available environments: ${availableEnvs.join(", ")}`
+				: "No environments are defined in the configuration file.";
+		throw new UserError(
+			`Environment "${environmentName}" not found in configuration.\n${envList}`
+		);
+	}
+
+	return envConfig;
+}
 
 /**
  * Collects all the vars types across all the environments defined in the config file
  *
+ * Behavior:
+ * - If `args.env` is specified: only collect vars from that specific environment
+ * - Otherwise: collect vars from top-level AND all named environments
+ *
  * @param args all the CLI arguments passed to the `types` command
+ *
  * @returns an object whose keys are the variable names and whose values are arrays containing all the computed types for such variables
  */
 function collectAllVars(
@@ -866,11 +1328,12 @@ function collectAllVars(
 		return;
 	}
 
-		if (typeof value === "number" || typeof value === "boolean") {
-			varsInfo[key].add(`${value}`);
-			return;
-		}
-		if (typeof value === "string" || typeof value === "object") {
+		if (
+			typeof value === "string" ||
+			typeof value === "number" ||
+			typeof value === "boolean" ||
+			typeof value === "object"
+		) {
 			varsInfo[key].add(JSON.stringify(value));
 			return;
 		}
@@ -881,10 +1344,16 @@ function collectAllVars(
 	}
 
 	const { rawConfig } = experimental_readRawConfig(args);
-	collectEnvironmentVars(rawConfig.vars);
-	Object.entries(rawConfig.env ?? {}).forEach(([_envName, env]) => {
-		collectEnvironmentVars(env.vars);
-	});
+
+	if (args.env) {
+		const envConfig = getEnvConfig(args.env, rawConfig);
+		collectEnvironmentVars(envConfig.vars);
+	} else {
+		collectEnvironmentVars(rawConfig.vars);
+		for (const env of Object.values(rawConfig.env ?? {})) {
+			collectEnvironmentVars(env.vars);
+		}
+	}
 
 	return Object.fromEntries(
 		Object.entries(varsInfo).map(([key, value]) => [key, [...value]])
@@ -894,13 +1363,14 @@ function collectAllVars(
 /**
  * Given an array it returns a string representing the types present in such array
  *
- * e.g. 
- * `[1, 2, 3]` returns `number[]`,
- * `[1, 2, 'three']` returns `(number|string)[]`,
- * `['false', true]` returns `(string|boolean)[]`,
- *
  * @param array the target array
+ *
  * @returns a string representing the types of such array
+ *
+ * @example
+ * `[1, 2, 3]` => `number[]`
+ * `[1, 2, 'three']` => `(number|string)[]`
+ * `['false', true]` => `(string|boolean)[]`
  */
 function typeofArray(array: unknown[]): string {
 	const typesInArray = [...new Set(array.map((item) => typeof item))].sort();
@@ -912,7 +1382,1601 @@ function typeofArray(array: unknown[]): string {
 	return `(${typesInArray.join("|")})[]`;
 }
 
-const logHorizontalRule = () => {
-	const screenWidth = process.stdout.columns;
-	logger.log(chalk.dim("─".repeat(Math.min(screenWidth, 60))));
-};
+interface CollectedBinding {
+	/**
+	 * The binding category (e.g., "kv_namespaces", "d1_databases")
+	 */
+	bindingCategory: string;
+
+	/**
+	 * The binding name (e.g., "MY_KV_NAMESPACE")
+	 */
+	name: string;
+
+	/**
+	 * The TypeScript type (e.g., "KVNamespace")
+	 */
+	type: string;
+}
+
+/**
+ * Collects all core bindings across environments defined in the config file
+ *
+ * This will aggregate and collect all bindings that can be collected in the same way.
+ * However, some resources, such as Durable Objects, services, and workflows, have to
+ * be handled uniquely and as such have their own dedicated `collectX` functions.
+ *
+ * Behavior:
+ * - If `args.env` is specified: only collect bindings from that specific environment
+ * - Otherwise: collect bindings from top-level AND all named environments
+ *
+ * @param args - All the CLI arguments passed to the `types` command
+ *
+ * @returns An array of collected bindings with their names, types, and categories
+ *
+ * @throws {UserError} If a binding name exists with different types across environments
+ */
+function collectCoreBindings(
+	args: Partial<(typeof typesCommand)["args"]>
+): Array<CollectedBinding> {
+	const bindingsMap = new Map<string, CollectedBinding>();
+
+	function addBinding(
+		name: string,
+		type: string,
+		bindingCategory: string,
+		envName: string
+	) {
+		const existing = bindingsMap.get(name);
+		if (existing) {
+			if (existing.bindingCategory !== bindingCategory) {
+				throw new UserError(
+					`Binding "${name}" has conflicting types across environments: ` +
+						`"${existing.bindingCategory}" vs "${bindingCategory}" (in ${envName}). ` +
+						`Please use unique binding names for different binding types.`
+				);
+			}
+
+			return;
+		}
+		bindingsMap.set(name, { name, type, bindingCategory });
+	}
+
+	function collectEnvironmentBindings(
+		env: RawEnvironment | undefined,
+		envName: string
+	) {
+		if (!env) {
+			return;
+		}
+
+		for (const [index, kv] of (env.kv_namespaces ?? []).entries()) {
+			if (!kv.binding) {
+				throwMissingBindingError({
+					binding: kv,
+					bindingType: "kv_namespaces",
+					configPath: args.config,
+					envName,
+					fieldName: "binding",
+					index,
+				});
+			}
+
+			addBinding(kv.binding, "KVNamespace", "kv_namespaces", envName);
+		}
+
+		for (const [index, r2] of (env.r2_buckets ?? []).entries()) {
+			if (!r2.binding) {
+				throwMissingBindingError({
+					binding: r2,
+					bindingType: "r2_buckets",
+					configPath: args.config,
+					envName,
+					fieldName: "binding",
+					index,
+				});
+			}
+
+			addBinding(r2.binding, "R2Bucket", "r2_buckets", envName);
+		}
+
+		for (const [index, d1] of (env.d1_databases ?? 
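+		/* Illustrative error (hypothetical entry): an item such as
+		   `{ "database_id": "xxx" }` at env.staging.d1_databases[0] is reported
+		   via throwMissingBindingError as:
+		     Processing wrangler.jsonc configuration:
+		      - "env.staging" environment configuration
+		      - "env.staging.d1_databases[0]" bindings should have a string "binding" field but got {"database_id":"xxx"}. */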
[]).entries()) { + if (!d1.binding) { + throwMissingBindingError({ + binding: d1, + bindingType: "d1_databases", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding(d1.binding, "D1Database", "d1_databases", envName); + } + + for (const [index, vectorize] of (env.vectorize ?? []).entries()) { + if (!vectorize.binding) { + throwMissingBindingError({ + binding: vectorize, + bindingType: "vectorize", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding(vectorize.binding, "VectorizeIndex", "vectorize", envName); + } + + for (const [index, hyperdrive] of (env.hyperdrive ?? []).entries()) { + if (!hyperdrive.binding) { + throwMissingBindingError({ + binding: hyperdrive, + bindingType: "hyperdrive", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding(hyperdrive.binding, "Hyperdrive", "hyperdrive", envName); + } + + for (const [index, sendEmail] of (env.send_email ?? []).entries()) { + if (!sendEmail.name) { + throwMissingBindingError({ + binding: sendEmail, + bindingType: "send_email", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + addBinding(sendEmail.name, "SendEmail", "send_email", envName); + } + + for (const [index, ae] of (env.analytics_engine_datasets ?? []).entries()) { + if (!ae.binding) { + throwMissingBindingError({ + binding: ae, + bindingType: "analytics_engine_datasets", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding( + ae.binding, + "AnalyticsEngineDataset", + "analytics_engine_datasets", + envName + ); + } + + for (const [index, dispatch] of (env.dispatch_namespaces ?? []).entries()) { + if (!dispatch.binding) { + throwMissingBindingError({ + binding: dispatch, + bindingType: "dispatch_namespaces", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding( + dispatch.binding, + "DispatchNamespace", + "dispatch_namespaces", + envName + ); + } + + for (const [index, mtls] of (env.mtls_certificates ?? []).entries()) { + if (!mtls.binding) { + throwMissingBindingError({ + binding: mtls, + bindingType: "mtls_certificates", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding(mtls.binding, "Fetcher", "mtls_certificates", envName); + } + + for (const [index, queue] of (env.queues?.producers ?? []).entries()) { + if (!queue.binding) { + throwMissingBindingError({ + binding: queue, + bindingType: "queues.producers", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding(queue.binding, "Queue", "queues_producers", envName); + } + + for (const [index, secret] of (env.secrets_store_secrets ?? []).entries()) { + if (!secret.binding) { + throwMissingBindingError({ + binding: secret, + bindingType: "secrets_store_secrets", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding( + secret.binding, + "SecretsStoreSecret", + "secrets_store_secrets", + envName + ); + } + + for (const [index, helloWorld] of ( + env.unsafe_hello_world ?? [] + ).entries()) { + if (!helloWorld.binding) { + throwMissingBindingError({ + binding: helloWorld, + bindingType: "unsafe_hello_world", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding( + helloWorld.binding, + "HelloWorldBinding", + "unsafe_hello_world", + envName + ); + } + + for (const [index, ratelimit] of (env.ratelimits ?? 
[]).entries()) { + if (!ratelimit.name) { + throwMissingBindingError({ + binding: ratelimit, + bindingType: "ratelimits", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + addBinding(ratelimit.name, "RateLimit", "ratelimits", envName); + } + + for (const [index, workerLoader] of (env.worker_loaders ?? []).entries()) { + if (!workerLoader.binding) { + throwMissingBindingError({ + binding: workerLoader, + bindingType: "worker_loaders", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding( + workerLoader.binding, + "WorkerLoader", + "worker_loaders", + envName + ); + } + + for (const [index, vpcService] of (env.vpc_services ?? []).entries()) { + if (!vpcService.binding) { + throwMissingBindingError({ + binding: vpcService, + bindingType: "vpc_services", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding(vpcService.binding, "Fetcher", "vpc_services", envName); + } + + for (const [index, pipeline] of (env.pipelines ?? []).entries()) { + if (!pipeline.binding) { + throwMissingBindingError({ + binding: pipeline, + bindingType: "pipelines", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + addBinding( + pipeline.binding, + 'import("cloudflare:pipelines").Pipeline', + "pipelines", + envName + ); + } + + if (env.logfwdr?.bindings?.length) { + addBinding("LOGFWDR_SCHEMA", "any", "logfwdr", envName); + } + + if (env.browser) { + if (!env.browser.binding) { + throwMissingBindingError({ + binding: env.browser, + bindingType: "browser", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + addBinding(env.browser.binding, "Fetcher", "browser", envName); + } + } + + if (env.ai) { + if (!env.ai.binding) { + throwMissingBindingError({ + binding: env.ai, + bindingType: "ai", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + addBinding(env.ai.binding, "Ai", "ai", envName); + } + } + + if (env.images) { + if (!env.images.binding) { + throwMissingBindingError({ + binding: env.images, + bindingType: "images", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + addBinding(env.images.binding, "ImagesBinding", "images", envName); + } + } + + if (env.media) { + if (!env.media.binding) { + throwMissingBindingError({ + binding: env.media, + bindingType: "media", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + addBinding(env.media.binding, "MediaBinding", "media", envName); + } + } + + if (env.version_metadata) { + if (!env.version_metadata.binding) { + throwMissingBindingError({ + binding: env.version_metadata, + bindingType: "version_metadata", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + addBinding( + env.version_metadata.binding, + "WorkerVersionMetadata", + "version_metadata", + envName + ); + } + } + + if (env.assets?.binding) { + addBinding(env.assets.binding, "Fetcher", "assets", envName); + } + } + + const { rawConfig } = experimental_readRawConfig(args); + + if (args.env) { + const envConfig = getEnvConfig(args.env, rawConfig); + collectEnvironmentBindings(envConfig, args.env); + } else { + collectEnvironmentBindings(rawConfig, TOP_LEVEL_ENV_NAME); + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + collectEnvironmentBindings(env, envName); + } + } + + return Array.from(bindingsMap.values()); +} + +/** + * Collects Durable Object bindings across environments. 
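+ *
+ * Bindings are de-duplicated by name: when the same binding name appears in
+ * several environments, the first occurrence wins (and the top level is visited
+ * first when no `--env` is given).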
+ * + * This is separate because DOs need special handling for type generation. + * + * @param args - All the CLI arguments passed to the `types` command + * + * @returns An array of collected Durable Object bindings with their names, class name & possible script name. + */ +function collectAllDurableObjects( + args: Partial<(typeof typesCommand)["args"]> +): Array<{ + class_name: string; + name: string; + script_name?: string; +}> { + const durableObjectsMap = new Map< + string, + { + class_name: string; + name: string; + script_name?: string; + } + >(); + + function collectEnvironmentDOs( + env: RawEnvironment | undefined, + envName: string + ) { + if (!env?.durable_objects?.bindings) { + return; + } + + for (const [index, doBinding] of env.durable_objects.bindings.entries()) { + if (!doBinding.name) { + throwMissingBindingError({ + binding: doBinding, + bindingType: "durable_objects.bindings", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + if (durableObjectsMap.has(doBinding.name)) { + continue; + } + + durableObjectsMap.set(doBinding.name, { + class_name: doBinding.class_name, + name: doBinding.name, + script_name: doBinding.script_name, + }); + } + } + + const { rawConfig } = experimental_readRawConfig(args); + + if (args.env) { + const envConfig = getEnvConfig(args.env, rawConfig); + collectEnvironmentDOs(envConfig, args.env); + } else { + collectEnvironmentDOs(rawConfig, TOP_LEVEL_ENV_NAME); + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + collectEnvironmentDOs(env, envName); + } + } + + return Array.from(durableObjectsMap.values()); +} + +/** + * Collects Service bindings across environments. + * + * This is separate because services need special handling for type generation. + * + * @param args - All the CLI arguments passed to the `types` command + * + * @returns An array of collected service bindings with their binding, service & possible entrypoint. + */ +function collectAllServices( + args: Partial<(typeof typesCommand)["args"]> +): Array<{ + binding: string; + service: string; + entrypoint?: string; +}> { + const servicesMap = new Map< + string, + { + binding: string; + entrypoint?: string; + service: string; + } + >(); + + function collectEnvironmentServices( + env: RawEnvironment | undefined, + envName: string + ) { + if (!env?.services) { + return; + } + + for (const [index, service] of env.services.entries()) { + if (!service.binding) { + throwMissingBindingError({ + binding: service, + bindingType: "services", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + if (servicesMap.has(service.binding)) { + continue; + } + + servicesMap.set(service.binding, { + binding: service.binding, + entrypoint: service.entrypoint, + service: service.service, + }); + } + } + + const { rawConfig } = experimental_readRawConfig(args); + + if (args.env) { + const envConfig = getEnvConfig(args.env, rawConfig); + collectEnvironmentServices(envConfig, args.env); + } else { + collectEnvironmentServices(rawConfig, TOP_LEVEL_ENV_NAME); + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + collectEnvironmentServices(env, envName); + } + } + + return Array.from(servicesMap.values()); +} + +/** + * Collects Workflow bindings across environments. + * + * This is separate because workflows need special handling for type generation. 
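+ *
+ * As a sketch (hypothetical entry), a config containing
+ * `{ "binding": "MY_WORKFLOW", "name": "my-workflow", "class_name": "MyWorkflow" }`
+ * is collected as:
+ *
+ * ```ts
+ * { binding: "MY_WORKFLOW", name: "my-workflow", class_name: "MyWorkflow", script_name: undefined }
+ * ```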
+ *
+ * @param args - All the CLI arguments passed to the `types` command
+ *
+ * @returns An array of collected workflow bindings with their binding, workflow name, class name, and optional script name.
+ */
+function collectAllWorkflows(
+	args: Partial<(typeof typesCommand)["args"]>
+): Array<{
+	binding: string;
+	name: string;
+	class_name: string;
+	script_name?: string;
+}> {
+	const workflowsMap = new Map<
+		string,
+		{
+			binding: string;
+			name: string;
+			class_name: string;
+			script_name?: string;
+		}
+	>();
+
+	function collectEnvironmentWorkflows(
+		env: RawEnvironment | undefined,
+		envName: string
+	) {
+		if (!env?.workflows) {
+			return;
+		}
+
+		for (const [index, workflow] of env.workflows.entries()) {
+			if (!workflow.binding) {
+				throwMissingBindingError({
+					binding: workflow,
+					bindingType: "workflows",
+					configPath: args.config,
+					envName,
+					fieldName: "binding",
+					index,
+				});
+			}
+
+			if (workflowsMap.has(workflow.binding)) {
+				continue;
+			}
+
+			workflowsMap.set(workflow.binding, {
+				binding: workflow.binding,
+				name: workflow.name,
+				class_name: workflow.class_name,
+				script_name: workflow.script_name,
+			});
+		}
+	}
+
+	const { rawConfig } = experimental_readRawConfig(args);
+
+	if (args.env) {
+		const envConfig = getEnvConfig(args.env, rawConfig);
+		collectEnvironmentWorkflows(envConfig, args.env);
+	} else {
+		collectEnvironmentWorkflows(rawConfig, TOP_LEVEL_ENV_NAME);
+		for (const [envName, env] of Object.entries(rawConfig.env ?? {})) {
+			collectEnvironmentWorkflows(env, envName);
+		}
+	}
+
+	return Array.from(workflowsMap.values());
+}
+
+/**
+ * Collects unsafe bindings across environments.
+ *
+ * @param args - All the CLI arguments passed to the `types` command
+ *
+ * @returns An array of collected unsafe bindings with their names and types.
+ */
+function collectAllUnsafeBindings(
+	args: Partial<(typeof typesCommand)["args"]>
+): Array<{
+	name: string;
+	type: string;
+}> {
+	const unsafeMap = new Map<
+		string,
+		{
+			name: string;
+			type: string;
+		}
+	>();
+
+	function collectEnvironmentUnsafe(
+		env: RawEnvironment | undefined,
+		envName: string
+	) {
+		if (!env?.unsafe?.bindings) {
+			return;
+		}
+
+		for (const [index, binding] of env.unsafe.bindings.entries()) {
+			if (!binding.name) {
+				throwMissingBindingError({
+					binding,
+					bindingType: "unsafe.bindings",
+					configPath: args.config,
+					envName,
+					fieldName: "name",
+					index,
+				});
+			}
+
+			if (unsafeMap.has(binding.name)) {
+				continue;
+			}
+
+			unsafeMap.set(binding.name, {
+				name: binding.name,
+				type: binding.type,
+			});
+		}
+	}
+
+	const { rawConfig } = experimental_readRawConfig(args);
+
+	if (args.env) {
+		const envConfig = getEnvConfig(args.env, rawConfig);
+		collectEnvironmentUnsafe(envConfig, args.env);
+	} else {
+		collectEnvironmentUnsafe(rawConfig, TOP_LEVEL_ENV_NAME);
+		for (const [envName, env] of Object.entries(rawConfig.env ?? {})) {
+			collectEnvironmentUnsafe(env, envName);
+		}
+	}
+
+	return Array.from(unsafeMap.values());
+}
+
+const logHorizontalRule = () => {
+	// `columns` is undefined when stdout is not a TTY; fall back to the 60-char cap
+	const screenWidth = process.stdout.columns ?? 60;
+	logger.log(chalk.dim("─".repeat(Math.min(screenWidth, 60))));
+};
+
+interface PerEnvBinding {
+	bindingCategory: string;
+	name: string;
+	type: string;
+}
+
+/**
+ * Collects vars per environment, returning a map from environment name to vars.
+ *
+ * Top-level vars use the sentinel `TOP_LEVEL_ENV_NAME`.
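+ *
+ * For example (invented names), with `strictVars` enabled values are captured
+ * as JSON literal types; otherwise only their `typeof` is recorded:
+ *
+ *   // config: { vars: { MODE: "dev" }, env: { production: { vars: { MODE: "prod" } } } }
+ *   collectVarsPerEnvironment({ config: "wrangler.jsonc", strictVars: true });
+ *   // => Map { TOP_LEVEL_ENV_NAME => { MODE: ['"dev"'] }, "production" => { MODE: ['"prod"'] } }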
+ *
+ * @param args - CLI arguments passed to the `types` command
+ *
+ * @returns A map of environment name to an object of var names to their type values
+ */
+function collectVarsPerEnvironment(
+	args: Partial<(typeof typesCommand)["args"]>
+): Map<string, Record<string, string[]>> {
+	const result = new Map<string, Record<string, string[]>>();
+
+	function collectVars(vars: RawEnvironment["vars"]): Record<string, string[]> {
+		const varsInfo: Record<string, Set<string>> = {};
+
+		Object.entries(vars ?? {}).forEach(([key, value]) => {
+			varsInfo[key] ??= new Set();
+
+			if (!args.strictVars) {
+				varsInfo[key].add(
+					Array.isArray(value) ? typeofArray(value) : typeof value
+				);
+				return;
+			}
+
+			if (
+				typeof value === "string" ||
+				typeof value === "number" ||
+				typeof value === "boolean" ||
+				typeof value === "object"
+			) {
+				varsInfo[key].add(JSON.stringify(value));
+				return;
+			}
+
+			varsInfo[key].add("unknown");
+		});
+
+		return Object.fromEntries(
+			Object.entries(varsInfo).map(([key, value]) => [key, [...value]])
+		);
+	}
+
+	const { rawConfig } = experimental_readRawConfig(args);
+
+	// Collect top-level vars
+	const topLevelVars = collectVars(rawConfig.vars);
+	if (Object.keys(topLevelVars).length > 0) {
+		result.set(TOP_LEVEL_ENV_NAME, topLevelVars);
+	}
+
+	// Collect per-environment vars
+	for (const [envName, env] of Object.entries(rawConfig.env ?? {})) {
+		const envVars = collectVars(env.vars);
+		if (Object.keys(envVars).length > 0) {
+			result.set(envName, envVars);
+		}
+	}
+
+	return result;
+}
+
+/**
+ * Collects core bindings per environment, returning a map from environment name to bindings.
+ *
+ * Top-level bindings use the sentinel `TOP_LEVEL_ENV_NAME`.
+ *
+ * Unlike `collectCoreBindings`, which aggregates all bindings, this function keeps them
+ * separate per environment for per-environment interface generation.
+ *
+ * @param args - CLI arguments passed to the `types` command
+ *
+ * @returns A map of environment name to array of bindings
+ */
+function collectCoreBindingsPerEnvironment(
+	args: Partial<(typeof typesCommand)["args"]>
+): Map<string, Array<PerEnvBinding>> {
+	const result = new Map<string, Array<PerEnvBinding>>();
+
+	function collectEnvironmentBindings(
+		env: RawEnvironment | undefined,
+		envName: string
+	): Array<PerEnvBinding> {
+		if (!env) {
+			return [];
+		}
+
+		const bindings = new Array<PerEnvBinding>();
+
+		for (const [index, kv] of (env.kv_namespaces ?? []).entries()) {
+			if (!kv.binding) {
+				throwMissingBindingError({
+					binding: kv,
+					bindingType: "kv_namespaces",
+					configPath: args.config,
+					envName,
+					fieldName: "binding",
+					index,
+				});
+			}
+
+			bindings.push({
+				bindingCategory: "kv_namespaces",
+				name: kv.binding,
+				type: "KVNamespace",
+			});
+		}
+
+		for (const [index, r2] of (env.r2_buckets ?? []).entries()) {
+			if (!r2.binding) {
+				throwMissingBindingError({
+					binding: r2,
+					bindingType: "r2_buckets",
+					configPath: args.config,
+					envName,
+					fieldName: "binding",
+					index,
+				});
+			}
+
+			bindings.push({
+				bindingCategory: "r2_buckets",
+				name: r2.binding,
+				type: "R2Bucket",
+			});
+		}
+
+		for (const [index, d1] of (env.d1_databases ?? []).entries()) {
+			if (!d1.binding) {
+				throwMissingBindingError({
+					binding: d1,
+					bindingType: "d1_databases",
+					configPath: args.config,
+					envName,
+					fieldName: "binding",
+					index,
+				});
+			}
+
+			bindings.push({
+				bindingCategory: "d1_databases",
+				name: d1.binding,
+				type: "D1Database",
+			});
+		}
+
+		for (const [index, vectorize] of (env.vectorize ??
[]).entries()) { + if (!vectorize.binding) { + throwMissingBindingError({ + binding: vectorize, + bindingType: "vectorize", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "vectorize", + name: vectorize.binding, + type: "VectorizeIndex", + }); + } + + for (const [index, hyperdrive] of (env.hyperdrive ?? []).entries()) { + if (!hyperdrive.binding) { + throwMissingBindingError({ + binding: hyperdrive, + bindingType: "hyperdrive", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "hyperdrive", + name: hyperdrive.binding, + type: "Hyperdrive", + }); + } + + for (const [index, sendEmail] of (env.send_email ?? []).entries()) { + if (!sendEmail.name) { + throwMissingBindingError({ + binding: sendEmail, + bindingType: "send_email", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + bindings.push({ + bindingCategory: "send_email", + name: sendEmail.name, + type: "SendEmail", + }); + } + + for (const [index, ae] of (env.analytics_engine_datasets ?? []).entries()) { + if (!ae.binding) { + throwMissingBindingError({ + binding: ae, + bindingType: "analytics_engine_datasets", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "analytics_engine_datasets", + name: ae.binding, + type: "AnalyticsEngineDataset", + }); + } + + for (const [index, dispatch] of (env.dispatch_namespaces ?? []).entries()) { + if (!dispatch.binding) { + throwMissingBindingError({ + binding: dispatch, + bindingType: "dispatch_namespaces", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "dispatch_namespaces", + name: dispatch.binding, + type: "DispatchNamespace", + }); + } + + for (const [index, mtls] of (env.mtls_certificates ?? []).entries()) { + if (!mtls.binding) { + throwMissingBindingError({ + binding: mtls, + bindingType: "mtls_certificates", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "mtls_certificates", + name: mtls.binding, + type: "Fetcher", + }); + } + + for (const [index, queue] of (env.queues?.producers ?? []).entries()) { + if (!queue.binding) { + throwMissingBindingError({ + binding: queue, + bindingType: "queues.producers", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "queues_producers", + name: queue.binding, + type: "Queue", + }); + } + + for (const [index, secret] of (env.secrets_store_secrets ?? []).entries()) { + if (!secret.binding) { + throwMissingBindingError({ + binding: secret, + bindingType: "secrets_store_secrets", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "secrets_store_secrets", + name: secret.binding, + type: "SecretsStoreSecret", + }); + } + + for (const [index, helloWorld] of ( + env.unsafe_hello_world ?? [] + ).entries()) { + if (!helloWorld.binding) { + throwMissingBindingError({ + binding: helloWorld, + bindingType: "unsafe_hello_world", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "unsafe_hello_world", + name: helloWorld.binding, + type: "HelloWorldBinding", + }); + } + + for (const [index, ratelimit] of (env.ratelimits ?? 
[]).entries()) { + if (!ratelimit.name) { + throwMissingBindingError({ + binding: ratelimit, + bindingType: "ratelimits", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + bindings.push({ + bindingCategory: "ratelimits", + name: ratelimit.name, + type: "RateLimit", + }); + } + + for (const [index, workerLoader] of (env.worker_loaders ?? []).entries()) { + if (!workerLoader.binding) { + throwMissingBindingError({ + binding: workerLoader, + bindingType: "worker_loaders", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "worker_loaders", + name: workerLoader.binding, + type: "WorkerLoader", + }); + } + + for (const [index, vpcService] of (env.vpc_services ?? []).entries()) { + if (!vpcService.binding) { + throwMissingBindingError({ + binding: vpcService, + bindingType: "vpc_services", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "vpc_services", + name: vpcService.binding, + type: "Fetcher", + }); + } + + for (const [index, pipeline] of (env.pipelines ?? []).entries()) { + if (!pipeline.binding) { + throwMissingBindingError({ + binding: pipeline, + bindingType: "pipelines", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + bindings.push({ + bindingCategory: "pipelines", + name: pipeline.binding, + type: 'import("cloudflare:pipelines").Pipeline', + }); + } + + if (env.logfwdr?.bindings?.length) { + bindings.push({ + bindingCategory: "logfwdr", + name: "LOGFWDR_SCHEMA", + type: "any", + }); + } + + if (env.browser) { + if (!env.browser.binding) { + throwMissingBindingError({ + binding: env.browser, + bindingType: "browser", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + bindings.push({ + bindingCategory: "browser", + name: env.browser.binding, + type: "Fetcher", + }); + } + } + + if (env.ai) { + if (!env.ai.binding) { + throwMissingBindingError({ + binding: env.ai, + bindingType: "ai", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + bindings.push({ + bindingCategory: "ai", + name: env.ai.binding, + type: "Ai", + }); + } + } + + if (env.images) { + if (!env.images.binding) { + throwMissingBindingError({ + binding: env.images, + bindingType: "images", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + bindings.push({ + bindingCategory: "images", + name: env.images.binding, + type: "ImagesBinding", + }); + } + } + + if (env.media) { + if (!env.media.binding) { + throwMissingBindingError({ + binding: env.media, + bindingType: "media", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + bindings.push({ + bindingCategory: "media", + name: env.media.binding, + type: "MediaBinding", + }); + } + } + + if (env.version_metadata) { + if (!env.version_metadata.binding) { + throwMissingBindingError({ + binding: env.version_metadata, + bindingType: "version_metadata", + configPath: args.config, + envName, + fieldName: "binding", + }); + } else { + bindings.push({ + bindingCategory: "version_metadata", + name: env.version_metadata.binding, + type: "WorkerVersionMetadata", + }); + } + } + + if (env.assets?.binding) { + bindings.push({ + bindingCategory: "assets", + name: env.assets.binding, + type: "Fetcher", + }); + } + + return bindings; + } + + const { rawConfig } = experimental_readRawConfig(args); + + const topLevelBindings = collectEnvironmentBindings( + rawConfig, + 
TOP_LEVEL_ENV_NAME + ); + if (topLevelBindings.length > 0) { + result.set(TOP_LEVEL_ENV_NAME, topLevelBindings); + } + + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + const envBindings = collectEnvironmentBindings(env, envName); + if (envBindings.length > 0) { + result.set(envName, envBindings); + } + } + + return result; +} + +/** + * Collects Durable Object bindings per environment. + * + * @param args - CLI arguments passed to the `types` command + * + * @returns A map of environment name to array of DO bindings + */ +function collectDurableObjectsPerEnvironment( + args: Partial<(typeof typesCommand)["args"]> +): Map< + string, + Array<{ + class_name: string; + name: string; + script_name?: string; + }> +> { + const result = new Map< + string, + Array<{ + class_name: string; + name: string; + script_name?: string; + }> + >(); + + function collectEnvironmentDOs( + env: RawEnvironment | undefined, + envName: string + ): Array<{ name: string; class_name: string; script_name?: string }> { + const durableObjects = new Array<{ + name: string; + class_name: string; + script_name?: string; + }>(); + + if (!env?.durable_objects?.bindings) { + return durableObjects; + } + + for (const [index, doBinding] of env.durable_objects.bindings.entries()) { + if (!doBinding.name) { + throwMissingBindingError({ + binding: doBinding, + bindingType: "durable_objects.bindings", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + durableObjects.push({ + class_name: doBinding.class_name, + name: doBinding.name, + script_name: doBinding.script_name, + }); + } + + return durableObjects; + } + + const { rawConfig } = experimental_readRawConfig(args); + + const topLevelDOs = collectEnvironmentDOs(rawConfig, TOP_LEVEL_ENV_NAME); + if (topLevelDOs.length > 0) { + result.set(TOP_LEVEL_ENV_NAME, topLevelDOs); + } + + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + const envDOs = collectEnvironmentDOs(env, envName); + if (envDOs.length > 0) { + result.set(envName, envDOs); + } + } + + return result; +} + +/** + * Collects Service bindings per environment. 
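+ *
+ * For example (invented names), a service binding declared only under
+ * `env.staging` appears only under that environment's key:
+ *
+ *   collectServicesPerEnvironment({ config: "wrangler.jsonc" });
+ *   // => Map { "staging" => [{ binding: "AUTH", entrypoint: undefined, service: "auth-worker" }] }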
+ * + * @param args - CLI arguments passed to the `types` command + * + * @returns A map of environment name to array of service bindings + */ +function collectServicesPerEnvironment( + args: Partial<(typeof typesCommand)["args"]> +): Map< + string, + Array<{ + binding: string; + entrypoint?: string; + service: string; + }> +> { + const result = new Map< + string, + Array<{ + binding: string; + entrypoint?: string; + service: string; + }> + >(); + + function collectEnvironmentServices( + env: RawEnvironment | undefined, + envName: string + ): Array<{ + binding: string; + entrypoint?: string; + service: string; + }> { + const services = new Array<{ + binding: string; + service: string; + entrypoint?: string; + }>(); + + if (!env?.services) { + return services; + } + + for (const [index, service] of env.services.entries()) { + if (!service.binding) { + throwMissingBindingError({ + binding: service, + bindingType: "services", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + services.push({ + binding: service.binding, + entrypoint: service.entrypoint, + service: service.service, + }); + } + + return services; + } + + const { rawConfig } = experimental_readRawConfig(args); + + const topLevelServices = collectEnvironmentServices( + rawConfig, + TOP_LEVEL_ENV_NAME + ); + if (topLevelServices.length > 0) { + result.set(TOP_LEVEL_ENV_NAME, topLevelServices); + } + + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + const envServices = collectEnvironmentServices(env, envName); + if (envServices.length > 0) { + result.set(envName, envServices); + } + } + + return result; +} + +/** + * Collects Workflow bindings per environment. + * + * @param args - CLI arguments passed to the `types` command + * + * @returns A map of environment name to array of workflow bindings + */ +function collectWorkflowsPerEnvironment( + args: Partial<(typeof typesCommand)["args"]> +): Map< + string, + Array<{ + binding: string; + class_name: string; + name: string; + script_name?: string; + }> +> { + const result = new Map< + string, + Array<{ + binding: string; + class_name: string; + name: string; + script_name?: string; + }> + >(); + + function collectEnvironmentWorkflows( + env: RawEnvironment | undefined, + envName: string + ): Array<{ + binding: string; + class_name: string; + name: string; + script_name?: string; + }> { + const workflows = new Array<{ + binding: string; + class_name: string; + name: string; + script_name?: string; + }>(); + + if (!env?.workflows) { + return workflows; + } + + for (const [index, workflow] of env.workflows.entries()) { + if (!workflow.binding) { + throwMissingBindingError({ + binding: workflow, + bindingType: "workflows", + configPath: args.config, + envName, + fieldName: "binding", + index, + }); + } + + workflows.push({ + binding: workflow.binding, + class_name: workflow.class_name, + name: workflow.name, + script_name: workflow.script_name, + }); + } + + return workflows; + } + + const { rawConfig } = experimental_readRawConfig(args); + + const topLevelWorkflows = collectEnvironmentWorkflows( + rawConfig, + TOP_LEVEL_ENV_NAME + ); + if (topLevelWorkflows.length > 0) { + result.set(TOP_LEVEL_ENV_NAME, topLevelWorkflows); + } + + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + const envWorkflows = collectEnvironmentWorkflows(env, envName); + if (envWorkflows.length > 0) { + result.set(envName, envWorkflows); + } + } + + return result; +} + +/** + * Collects unsafe bindings per environment. 
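+ *
+ * For example (invented names), each unsafe binding is reduced to its name and
+ * the raw `type` string from the config:
+ *
+ *   collectUnsafeBindingsPerEnvironment({ config: "wrangler.jsonc" });
+ *   // => Map { TOP_LEVEL_ENV_NAME => [{ name: "METRICS", type: "metrics" }] }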
+ * + * @param args - CLI arguments passed to the `types` command + * + * @returns A map of environment name to array of unsafe bindings + */ +function collectUnsafeBindingsPerEnvironment( + args: Partial<(typeof typesCommand)["args"]> +): Map< + string, + Array<{ + name: string; + type: string; + }> +> { + const result = new Map< + string, + Array<{ + name: string; + type: string; + }> + >(); + + function collectEnvironmentUnsafe( + env: RawEnvironment | undefined, + envName: string + ): Array<{ + name: string; + type: string; + }> { + const unsafeBindings = new Array<{ + name: string; + type: string; + }>(); + + if (!env?.unsafe?.bindings) { + return unsafeBindings; + } + + for (const [index, binding] of env.unsafe.bindings.entries()) { + if (!binding.name) { + throwMissingBindingError({ + binding, + bindingType: "unsafe.bindings", + configPath: args.config, + envName, + fieldName: "name", + index, + }); + } + + unsafeBindings.push({ + name: binding.name, + type: binding.type, + }); + } + + return unsafeBindings; + } + + const { rawConfig } = experimental_readRawConfig(args); + + const topLevelUnsafe = collectEnvironmentUnsafe( + rawConfig, + TOP_LEVEL_ENV_NAME + ); + if (topLevelUnsafe.length > 0) { + result.set(TOP_LEVEL_ENV_NAME, topLevelUnsafe); + } + + for (const [envName, env] of Object.entries(rawConfig.env ?? {})) { + const envUnsafe = collectEnvironmentUnsafe(env, envName); + if (envUnsafe.length > 0) { + result.set(envName, envUnsafe); + } + } + + return result; +}
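+
+// Illustrative sketch (invented helper, not part of this patch): the
+// per-environment maps above can be merged by key so that one interface is
+// generated per environment name.
+//
+//   function environmentNames(args: Partial<(typeof typesCommand)["args"]>) {
+//   	return new Set<string>([
+//   		...collectVarsPerEnvironment(args).keys(),
+//   		...collectCoreBindingsPerEnvironment(args).keys(),
+//   		...collectDurableObjectsPerEnvironment(args).keys(),
+//   		...collectServicesPerEnvironment(args).keys(),
+//   		...collectWorkflowsPerEnvironment(args).keys(),
+//   		...collectUnsafeBindingsPerEnvironment(args).keys(),
+//   	]);
+//   }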