diff --git a/.gemini/settings.json b/.gemini/settings.json new file mode 100644 index 00000000..20d34743 --- /dev/null +++ b/.gemini/settings.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "playwright": { + "command": "npx", + "args": ["-y", "@playwright/mcp@latest"] + } + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 68b1afd1..85790477 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,15 +1,19 @@ name: CI on: + workflow_dispatch: pull_request: push: branches: - main tags: - '*' + schedule: + - cron: '0 8 * * *' jobs: pack: + name: Build package runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -24,6 +28,7 @@ jobs: path: '*.tgz' biome: + name: Lint (Biome) runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -34,6 +39,7 @@ jobs: - run: corepack yarn lint:js typescript: + name: Lint (TypeScript) runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -43,13 +49,15 @@ jobs: - run: corepack yarn - run: corepack yarn lint:ts - vitest: + unit: + name: Unit tests (Node ${{ matrix.node }}) runs-on: ubuntu-latest strategy: matrix: node: - 20 - 22 + - 24 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 @@ -58,14 +66,75 @@ jobs: - run: corepack yarn - run: corepack yarn test:unit - name: Upload coverage reports artifact - if: matrix.node == 22 # Only upload coverage from the latest Node.js version + if: matrix.node == 24 uses: actions/upload-artifact@v4 with: name: coverage-reports path: coverage/ + e2e: + name: E2E tests + # Run on push/schedule/dispatch, or on PRs only if from same repo (not forks) + # This protects secrets from being exposed to fork PRs + if: > + github.event_name != 'pull_request' || + github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 24 + - run: corepack yarn + - name: Download cloudflared + run: | + curl -fsSLo 
cloudflared-linux-amd64 https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 + chmod +x cloudflared-linux-amd64 + + # can be used for debugging: + # - name: Setup tmate session + # uses: mxschmitt/action-tmate@v3 + + - run: corepack yarn test + env: + TRANSLOADIT_KEY: ${{ secrets.TRANSLOADIT_KEY }} + TRANSLOADIT_SECRET: ${{ secrets.TRANSLOADIT_SECRET }} + NODE_OPTIONS: --trace-deprecation --trace-warnings + CLOUDFLARED_PATH: ./cloudflared-linux-amd64 + DEBUG: 'transloadit:*' + + - name: Generate the badge from the json-summary + run: node --experimental-strip-types test/generate-coverage-badge.ts coverage/coverage-summary.json + - name: Move HTML report and badge to the correct location + run: | + mv coverage/lcov-report static-build + mv coverage-badge.svg static-build/ + # *** BEGIN PUBLISH STATIC SITE STEPS *** + # Use the standard checkout action to check out the destination repo to a separate directory + # See https://github.com/mifi/github-action-push-static + - uses: actions/checkout@v4 + with: + ssh-key: ${{ secrets.COVERAGE_REPO_SSH_PRIVATE_KEY }} + repository: transloadit/node-sdk-coverage + path: static-files-destination + + # Push coverage data + - run: | + git config --global user.name github-actions + git config --global user.email github-actions@github.com + # Remove existing files: + rm -rf static-files-destination/* + # Replace with new files: + cp -a static-build/* static-files-destination/ + cd static-files-destination + git add . 
+ # git diff-index: to avoid doing the git commit failing if there are no changes to be commit + git diff-index --quiet HEAD || git commit --message 'Static file updates' + git push + coverage: - needs: vitest + name: Upload coverage + needs: unit runs-on: ubuntu-latest if: github.ref == 'refs/heads/main' steps: @@ -83,13 +152,26 @@ jobs: name: node-sdk fail_ci_if_error: true + slack-on-failure: + name: Slack notification + needs: [e2e] + if: ${{ failure() && github.ref == 'refs/heads/main' }} + runs-on: ubuntu-latest + steps: + - uses: 8398a7/action-slack@v3 + with: + status: failure + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + release: + name: Publish to npm runs-on: ubuntu-latest needs: - pack - biome - typescript - - vitest + - unit if: startsWith(github.ref, 'refs/tags/') permissions: id-token: write diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml deleted file mode 100644 index 6aee7c8f..00000000 --- a/.github/workflows/integration.yml +++ /dev/null @@ -1,88 +0,0 @@ -name: Integration tests - -on: - workflow_dispatch: - push: - branches: - - main - schedule: - - cron: '0 8 * * *' - -jobs: - test: - runs-on: ubuntu-latest - strategy: - # Integration tests are not yet ready to run in parallel - max-parallel: 1 - matrix: - node: - - 20.19.0 - - 22.14.0 - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node }} - - run: corepack yarn - - name: Download cloudflared - run: | - curl -fsSLo cloudflared-linux-amd64 https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 - chmod +x cloudflared-linux-amd64 - - # can be used for debugging: - # - name: Setup tmate session - # uses: mxschmitt/action-tmate@v3 - - - run: corepack yarn test:all - env: - TRANSLOADIT_KEY: ${{ secrets.TRANSLOADIT_KEY }} - TRANSLOADIT_SECRET: ${{ secrets.TRANSLOADIT_SECRET }} - NODE_OPTIONS: --trace-deprecation --trace-warnings - CLOUDFLARED_PATH: 
./cloudflared-linux-amd64 - DEBUG: 'transloadit:*' - - - if: matrix.node == '22.14.0' - name: Generate the badge from the json-summary - run: node --experimental-strip-types test/generate-coverage-badge.ts coverage/coverage-summary.json - - if: matrix.node == '22.14.0' - name: Move HTML report and badge to the correct location - run: | - mv coverage/lcov-report static-build - mv coverage-badge.svg static-build/ - # *** BEGIN PUBLISH STATIC SITE STEPS *** - # Use the standard checkout action to check out the destination repo to a separate directory - # See https://github.com/mifi/github-action-push-static - - if: matrix.node == '22.14.0' - uses: actions/checkout@v4 - with: - ssh-key: ${{ secrets.COVERAGE_REPO_SSH_PRIVATE_KEY }} - repository: transloadit/node-sdk-coverage - path: static-files-destination - - # Push coverage data - - if: matrix.node == '22.14.0' - run: | - git config --global user.name github-actions - git config --global user.email github-actions@github.com - # Remove existing files: - rm -rf static-files-destination/* - # Replace with new files: - cp -a static-build/* static-files-destination/ - cd static-files-destination - git add . - # git diff-index: to avoid doing the git commit failing if there are no changes to be commit - git diff-index --quiet HEAD || git commit --message 'Static file updates' - git push - - slack-on-failure: - needs: [test] - if: ${{ failure() && github.ref == 'refs/heads/main' }} - - runs-on: ubuntu-latest - - steps: - - uses: 8398a7/action-slack@v3 - with: - status: failure - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 2183316a..7fa6f343 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ You may also want to refer to [GitHub releases](https://github.com/transloadit/node-sdk/releases). 
+## Unreleased + +- Add `signal` option to `createAssembly()` for cancelling in-flight HTTP requests and TUS uploads via `AbortController` +- Add `signal` and `onPoll` options to `awaitAssemblyCompletion()` for cancellation and early termination (useful for custom progress reporting or superseding assemblies in watch mode) +- Integrate transloadify CLI into the SDK, providing `assemblies`, `templates`, `bills`, and `assembly-notifications` commands +- Add `--log-level (-l)` CLI option using syslog severity levels (err=3, warn=4, notice=5, info=6, debug=7, trace=8) +- Add `--endpoint` CLI option for custom API endpoint (also reads `TRANSLOADIT_ENDPOINT` env var) +- Add `--single-assembly` flag to `assemblies create` for passing multiple input files to a single assembly +- Add `--concurrency` option to `assemblies create` to limit parallel processing (default: 5) +- Fix file descriptor exhaustion by closing streams immediately and creating fresh ones on demand +- Apply stricter biome lint rules (noExplicitAny, useAwait, noForEach, noNonNullAssertion) + ## v4.0.7 Released: 2025-11-26. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9eda24b6..d0e048f5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,9 +50,9 @@ yarn test:unit This will also generate a coverage report in the `coverage` directory. -### Integration tests +### e2e tests -Integration tests are in the [`test/integration`](test/integration) folder. They require some extra setup. +e2e tests are in the [`test/e2e`](test/e2e) folder. They require some extra setup. Firstly, these tests require the Cloudflare executable. You can download this with: @@ -63,10 +63,10 @@ chmod +x cloudflared-linux-amd64 They also require a Transloadit key and secret, which you can get from https://transloadit.com/c/credentials. 
-You can run the integration tests with: +You can run the e2e tests with: ```sh -TRANSLOADIT_KEY='YOUR_TRANSLOADIT_KEY' TRANSLOADIT_SECRET='YOUR_TRANSLOADIT_SECRET' CLOUDFLARED_PATH='./cloudflared-linux-amd64' yarn test:integration +TRANSLOADIT_KEY='YOUR_TRANSLOADIT_KEY' TRANSLOADIT_SECRET='YOUR_TRANSLOADIT_SECRET' CLOUDFLARED_PATH='./cloudflared-linux-amd64' yarn test:e2e ``` ### Code Coverage diff --git a/MIGRATION.md b/MIGRATION.md index 83b5528b..50ebcf7a 100644 --- a/MIGRATION.md +++ b/MIGRATION.md @@ -19,11 +19,11 @@ Version 4 focuses on type-safety, clearer errors, and modern Node support. Most ```js // CommonJS import example async function getClient() { - const { Transloadit } = await import("transloadit"); + const { Transloadit } = await import('transloadit') return new Transloadit({ - authKey: process.env.TRANSLOADIT_KEY ?? "", - authSecret: process.env.TRANSLOADIT_SECRET ?? "", - }); + authKey: process.env.TRANSLOADIT_KEY ?? '', + authSecret: process.env.TRANSLOADIT_SECRET ?? '', + }) } ``` @@ -48,16 +48,16 @@ The package also exports `AssemblyInstructionsInput`, `AssemblyIndexItem`, `Asse const params: AssemblyInstructionsInput = { steps: { resize: { - use: ":original", - robot: "/image/resize", + use: ':original', + robot: '/image/resize', width: 320, height: 240, result: true, }, }, -}; +} -await transloadit.createAssembly({ params, waitForCompletion: true }); +await transloadit.createAssembly({ params, waitForCompletion: true }) ``` ## 3. 
Adjust API result handling @@ -82,15 +82,15 @@ const createdAssembly = await transloadit.createAssembly(...); ```ts try { - await transloadit.createAssembly({ params }); + await transloadit.createAssembly({ params }) } catch (error) { if (error instanceof ApiError && error.response.assembly_id) { console.error( - "Troubleshoot at https://transloadit.com/c/assemblies/" + + 'Troubleshoot at https://transloadit.com/c/assemblies/' + error.response.assembly_id - ); + ) } - throw error; + throw error } ``` @@ -103,18 +103,18 @@ try { authKey, authSecret, validateResponses: true, - }); + }) ``` - `getSignedSmartCDNUrl` generates Smart CDN URLs with signatures that match the server-side implementation: ```ts const signedUrl = transloadit.getSignedSmartCDNUrl({ - workspace: "my-team", - template: "hero-image", - input: "landing.jpg", - urlParams: { format: "webp" }, - }); + workspace: 'my-team', + template: 'hero-image', + input: 'landing.jpg', + urlParams: { format: 'webp' }, + }) ``` ## 6. Removed `createAssembly` callback support @@ -135,7 +135,7 @@ As a consequence of upgrading `got` to v14, the `gotRetry` option no longer acce ## Testing & troubleshooting -- Run your existing integration tests on Node 20+. If you relied on CommonJS `require`, convert those modules or wrap calls in `import()` shims as shown above. +- Run your existing e2e tests on Node 20+. If you relied on CommonJS `require`, convert those modules or wrap calls in `import()` shims as shown above. - If TypeScript raises errors about unfamiliar properties, import the respective types from `transloadit` instead of redefining them. - Schemas intentionally mirror the current public API. Some properties remain permissive while we tighten validation in the API itself; report gaps if the SDK raises or misses invalid data. 
diff --git a/README.md b/README.md index 416ad85a..c041b465 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,171 @@ or npm install --save transloadit ``` -## Usage +## Command Line Interface (CLI) + +This package includes a full-featured CLI for interacting with Transloadit from your terminal. + +### Quick Start + +```bash +# Set your credentials +export TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" +export TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" + +# See all available commands +npx transloadit --help +``` + +### Processing Media + +Create Assemblies to process files using Assembly Instructions (steps) or Templates: + +```bash +# Process a file using a steps file +npx transloadit assemblies create --steps steps.json --input image.jpg --output result.jpg + +# Process using a Template +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input image.jpg --output result.jpg + +# Process with custom fields +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --field size=100 --input image.jpg --output thumb.jpg + +# Process a directory of files +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ + +# Process recursively with file watching +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ --recursive --watch + +# Process multiple files in a single assembly +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input file1.jpg --input file2.jpg --output results/ --single-assembly + +# Limit concurrent processing (default: 5) +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ --concurrency 2 +``` + +### Managing Assemblies + +```bash +# List recent assemblies +npx transloadit assemblies list + +# List assemblies with filters +npx transloadit assemblies list --after 2024-01-01 --before 2024-12-31 + +# Get assembly status +npx transloadit assemblies get ASSEMBLY_ID + +# Cancel an assembly +npx transloadit 
assemblies delete ASSEMBLY_ID + +# Replay an assembly (re-run with original instructions) +npx transloadit assemblies replay ASSEMBLY_ID + +# Replay with different steps +npx transloadit assemblies replay --steps new-steps.json ASSEMBLY_ID + +# Replay using latest template version +npx transloadit assemblies replay --reparse-template ASSEMBLY_ID +``` + +### Managing Templates + +```bash +# List all templates +npx transloadit templates list + +# Get template content +npx transloadit templates get TEMPLATE_ID + +# Create a template from a JSON file +npx transloadit templates create my-template template.json + +# Modify a template +npx transloadit templates modify TEMPLATE_ID template.json + +# Rename a template +npx transloadit templates modify TEMPLATE_ID --name new-name + +# Delete a template +npx transloadit templates delete TEMPLATE_ID + +# Sync local template files with Transloadit (bidirectional) +npx transloadit templates sync templates/*.json +npx transloadit templates sync --recursive templates/ +``` + +### Billing + +```bash +# Get bill for a month +npx transloadit bills get 2024-01 + +# Get detailed bill as JSON +npx transloadit bills get 2024-01 --json +``` + +### Assembly Notifications + +```bash +# Replay a notification +npx transloadit assembly-notifications replay ASSEMBLY_ID + +# Replay to a different URL +npx transloadit assembly-notifications replay --notify-url https://example.com/hook ASSEMBLY_ID +``` + +### Signature Generation + +```bash +# Generate a signature for assembly params +echo '{"steps":{}}' | npx transloadit auth signature + +# Generate with specific algorithm +echo '{"steps":{}}' | npx transloadit auth signature --algorithm sha256 + +# Generate a signed Smart CDN URL +echo '{"workspace":"my-workspace","template":"my-template","input":"image.jpg"}' | npx transloadit auth smart-cdn +``` + +### CLI Options + +All commands support these common options: + +- `--json, -j` - Output results as JSON (useful for scripting) +- `--log-level, 
-l` - Set log verbosity level by name or number (default: notice) +- `--endpoint` - Custom API endpoint URL (or set `TRANSLOADIT_ENDPOINT` env var) +- `--help, -h` - Show help for a command + +The `assemblies create` command additionally supports: + +- `--single-assembly` - Pass all input files to a single assembly instead of one assembly per file + +#### Log Levels + +The CLI uses [syslog severity levels](https://en.wikipedia.org/wiki/Syslog#Severity_level). Lower = more severe, higher = more verbose: + +| Level | Value | Description | +| -------- | ----- | ------------------------------------- | +| `err` | 3 | Error conditions | +| `warn` | 4 | Warning conditions | +| `notice` | 5 | Normal but significant **(default)** | +| `info` | 6 | Informational messages | +| `debug` | 7 | Debug-level messages | +| `trace` | 8 | Most verbose/detailed | + +You can use either the level name or its numeric value: + +```bash +# Show only errors and warnings +npx transloadit assemblies list -l warn +npx transloadit assemblies list -l 4 + +# Show debug output +npx transloadit assemblies list -l debug +npx transloadit assemblies list -l 7 +``` + +## SDK Usage The following code will upload an image and resize it to a thumbnail: @@ -97,10 +261,12 @@ You can find [details about your executed Assemblies here](https://transloadit.c - [Upload and resize image](https://github.com/transloadit/node-sdk/blob/main/examples/resize_an_image.ts) - [Upload image and convert to WebP](https://github.com/transloadit/node-sdk/blob/main/examples/convert_to_webp.ts) +- [Rasterize SVG to PNG](https://github.com/transloadit/node-sdk/blob/main/examples/rasterize_svg_to_png.ts) - [Crop a face out of an image and download the result](https://github.com/transloadit/node-sdk/blob/main/examples/face_detect_download.ts) - [Retry example](https://github.com/transloadit/node-sdk/blob/main/examples/retry.ts) - [Calculate total costs (GB 
usage)](https://github.com/transloadit/node-sdk/blob/main/examples/fetch_costs_of_all_assemblies_in_timeframe.ts) - [Templates CRUD](https://github.com/transloadit/node-sdk/blob/main/examples/template_api.ts) +- [Template Credentials CRUD](https://github.com/transloadit/node-sdk/blob/main/examples/credentials.ts) For more fully working examples take a look at [`examples/`](https://github.com/transloadit/node-sdk/blob/main/examples/). @@ -116,6 +282,7 @@ Table of contents: - [Assemblies](#assemblies) - [Assembly notifications](#assembly-notifications) - [Templates](#templates) +- [Template Credentials](#template-credentials) - [Errors](#errors) - [Rate limiting & auto retry](#rate-limiting--auto-retry) @@ -301,6 +468,8 @@ This function will continously poll the specified Assembly `assemblyId` and reso - `onAssemblyProgress` - A progress function called on each poll. See `createAssembly` - `timeout` - How many milliseconds until polling times out (default: no timeout) - `interval` - Poll interval in milliseconds (default `1000`) +- `signal` - An `AbortSignal` to cancel polling. When aborted, the promise rejects with an `AbortError`. +- `onPoll` - A callback invoked at the start of each poll iteration. Return `false` to stop polling early and resolve with the last known status. Useful for implementing custom cancellation logic (e.g., superseding assemblies in watch mode). #### getLastUsedAssemblyUrl() @@ -369,6 +538,34 @@ The method returns an object containing these properties: Creates an `objectMode` `Readable` stream that automates handling of `listTemplates` pagination. Similar to `streamAssemblies`. +### Template Credentials + +Template Credentials allow you to store third-party credentials (e.g., AWS S3, Google Cloud Storage, FTP) securely on Transloadit for use in your Assembly Instructions. + +#### async createTemplateCredential(params) + +Creates a new Template Credential. The `params` object should contain the credential configuration. 
See [API documentation](https://transloadit.com/docs/api/template-credentials-post/). + +#### async editTemplateCredential(credentialId, params) + +Updates an existing Template Credential identified by `credentialId`. See [API documentation](https://transloadit.com/docs/api/template-credentials-credential-id-put/). + +#### async deleteTemplateCredential(credentialId) + +Deletes the Template Credential identified by `credentialId`. See [API documentation](https://transloadit.com/docs/api/template-credentials-credential-id-delete/). + +#### async getTemplateCredential(credentialId) + +Retrieves the Template Credential identified by `credentialId`. See [API documentation](https://transloadit.com/docs/api/template-credentials-credential-id-get/). + +#### async listTemplateCredentials(params) + +Lists all Template Credentials. See [API documentation](https://transloadit.com/docs/api/template-credentials-get/). + +#### streamTemplateCredentials(params) + +Creates an `objectMode` `Readable` stream that automates handling of `listTemplateCredentials` pagination. Similar to `streamAssemblies`. + ### Other #### setDefaultTimeout(timeout) @@ -385,25 +582,7 @@ Calculates a signature for the given `params` JSON object. If the `params` objec This function returns an object with the key `signature` (containing the calculated signature string) and a key `params`, which contains the stringified version of the passed `params` object (including the set expires and authKey keys). -#### CLI smart_sig - -Generate a signed Smart CDN URL from the command line. The CLI reads a JSON object from stdin, injects credentials from `TRANSLOADIT_KEY`/`TRANSLOADIT_SECRET`, and prints the URL returned by `getSignedSmartCDNUrl()`. - -```sh -TRANSLOADIT_KEY=... TRANSLOADIT_SECRET=... 
\ - printf '{"workspace":"demo","template":"resize","input":"image.jpg","url_params":{"width":320}}' | npx transloadit smart_sig -``` - -You can also use `TRANSLOADIT_AUTH_KEY`/`TRANSLOADIT_AUTH_SECRET` as aliases for the environment variables. - -#### CLI sig - -Sign assembly params from the command line. The CLI reads a JSON object from stdin (or falls back to an empty object), injects credentials from `TRANSLOADIT_KEY`/`TRANSLOADIT_SECRET`, and prints the payload returned by `calcSignature()`. Use `--algorithm` to pick a specific hashing algorithm; it defaults to `sha384`. - -```sh -TRANSLOADIT_KEY=... TRANSLOADIT_SECRET=... \ - printf '{"auth":{"expires":"2025-01-02T00:00:00Z"}}' | npx transloadit sig --algorithm sha256 -``` +See [Signature Generation](#signature-generation) in the CLI section for command-line usage. #### getSignedSmartCDNUrl(params) @@ -495,7 +674,7 @@ If you want to retry on other errors, please see the [retry example code](exampl This project uses [debug](https://github.com/visionmedia/debug) so you can run node with the `DEBUG=transloadit` evironment variable to enable verbose logging. 
Example: ```bash -DEBUG=transloadit* node examples/template_api.js +DEBUG=transloadit* npx tsx examples/template_api.ts ``` ## Maintainers diff --git a/biome.json b/biome.json index 85545952..aef24a02 100644 --- a/biome.json +++ b/biome.json @@ -1,11 +1,13 @@ { "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json", + "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true }, "files": { + "ignoreUnknown": false, "includes": [ "**", + "!package.json", "!coverage", "!dist", - "!node_modules", "!fixture", "!.vscode", "!src/alphalib" @@ -18,30 +20,43 @@ "indentWidth": 2, "lineEnding": "lf", "lineWidth": 100, - "attributePosition": "auto" + "attributePosition": "auto", + "bracketSameLine": false, + "bracketSpacing": true, + "expand": "auto", + "useEditorconfig": true, + "includes": ["**", "!**/lib/", "!**/node_modules/"] }, - "assist": { "actions": { "source": { "organizeImports": "on" } } }, "linter": { "enabled": true, "rules": { "recommended": true, "suspicious": { - "noExplicitAny": "warn", + "noExplicitAny": "error", "noImplicitAnyLet": "error", - "noConfusingVoidType": "warn", - "noAssignInExpressions": "off", - "noTemplateCurlyInString": "off" + "noConfusingVoidType": "error", + "noAssignInExpressions": "error", + "noArrayIndexKey": "error", + "noShadowRestrictedNames": "error", + "noExportsInTest": "error", + "noDuplicateTestHooks": "error", + "useIterableCallbackReturn": "error", + "noTemplateCurlyInString": "off", + "useAwait": "error" }, "correctness": { - "noInvalidUseBeforeDeclaration": "warn" + "noInvalidUseBeforeDeclaration": "error", + "noVoidTypeReturn": "error" }, "complexity": { - "useLiteralKeys": "off", - "noForEach": "warn" + "useLiteralKeys": "error", + "noForEach": "error" }, "style": { - "noParameterAssign": "warn", - "noUnusedTemplateLiteral": "off", + "noNonNullAssertion": "error", + "noNamespace": "error", + "noParameterAssign": "error", + "noUnusedTemplateLiteral": "error", "useAsConstAssertion": "error", 
"useDefaultParameterLast": "error", "useEnumInitializers": "error", @@ -49,7 +64,14 @@ "useSingleVarDeclarator": "error", "useNumberNamespace": "error", "noInferrableTypes": "error", - "noUselessElse": "error" + "noUselessElse": "error", + "useImportType": { + "level": "error", + "options": { + "style": "separatedType" + } + }, + "useNodejsImportProtocol": "error" } } }, @@ -60,10 +82,25 @@ "trailingCommas": "all", "semicolons": "asNeeded", "arrowParentheses": "always", - "bracketSpacing": true, "bracketSameLine": false, "quoteStyle": "single", - "attributePosition": "auto" + "attributePosition": "auto", + "bracketSpacing": true } - } + }, + "assist": { + "enabled": true, + "actions": { "source": { "organizeImports": "on" } } + }, + "overrides": [ + { + "includes": ["*.html"], + "javascript": { "formatter": { "quoteStyle": "double" } } + }, + { + "includes": ["*.scss", "*.css"], + "javascript": { "formatter": { "quoteStyle": "double" } }, + "formatter": { "lineWidth": 80 } + } + ] } diff --git a/examples/credentials.ts b/examples/credentials.ts index 397754a8..11db4829 100644 --- a/examples/credentials.ts +++ b/examples/credentials.ts @@ -6,7 +6,8 @@ // // yarn prepack // -import { type CreateTemplateCredentialParams, Transloadit } from 'transloadit' +import type { CreateTemplateCredentialParams } from 'transloadit' +import { Transloadit } from 'transloadit' const { TRANSLOADIT_KEY, TRANSLOADIT_SECRET } = process.env if (TRANSLOADIT_KEY == null || TRANSLOADIT_SECRET == null) { @@ -31,7 +32,7 @@ const credentialParams: CreateTemplateCredentialParams = { }, } -console.log(`==> listTemplateCredentials`) +console.log('==> listTemplateCredentials') const { credentials } = await transloadit.listTemplateCredentials({ sort: 'created', order: 'asc', @@ -50,7 +51,7 @@ for (const credential of credentials) { } } -console.log(`==> createTemplateCredential`) +console.log('==> createTemplateCredential') const createTemplateCredentialResult = await 
transloadit.createTemplateCredential(credentialParams) console.log('TemplateCredential created successfully:', createTemplateCredentialResult) // ^-- with Templates, there is `ok`, `message`, `id`, `content`, `name`, `require_signature_auth`. Same is true for: created, updated, fetched diff --git a/examples/template_api.ts b/examples/template_api.ts index 9d792bfe..7ef841d8 100644 --- a/examples/template_api.ts +++ b/examples/template_api.ts @@ -6,7 +6,8 @@ // // yarn prepack // -import { type TemplateContent, Transloadit } from 'transloadit' +import type { TemplateContent } from 'transloadit' +import { Transloadit } from 'transloadit' const { TRANSLOADIT_KEY, TRANSLOADIT_SECRET } = process.env if (TRANSLOADIT_KEY == null || TRANSLOADIT_SECRET == null) { diff --git a/package.json b/package.json index 9d623b8c..4079271c 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "mp3" ], "author": "Tim Koschuetzki ", - "packageManager": "yarn@4.10.3", + "packageManager": "yarn@4.12.0", "engines": { "node": ">= 20" }, @@ -20,12 +20,17 @@ "@aws-sdk/client-s3": "^3.891.0", "@aws-sdk/s3-request-presigner": "^3.891.0", "@transloadit/sev-logger": "^0.0.15", + "clipanion": "^4.0.0-rc.4", "debug": "^4.4.3", + "dotenv": "^17.2.3", "form-data": "^4.0.4", "got": "14.4.9", "into-stream": "^9.0.0", "is-stream": "^4.0.1", + "node-watch": "^0.7.4", "p-map": "^7.0.3", + "p-queue": "^9.0.1", + "recursive-readdir": "^2.2.3", "tus-js-client": "^4.3.1", "type-fest": "^4.41.0", "zod": "3.25.76" @@ -33,14 +38,16 @@ "devDependencies": { "@biomejs/biome": "^2.2.4", "@types/debug": "^4.1.12", + "@types/recursive-readdir": "^2.2.4", "@types/temp": "^0.9.4", "@vitest/coverage-v8": "^3.2.4", "badge-maker": "^5.0.2", - "dotenv": "^17.2.2", "execa": "9.6.0", + "image-size": "^2.0.2", "nock": "^14.0.10", "npm-run-all": "^4.1.5", "p-retry": "^7.0.0", + "rimraf": "^6.1.2", "temp": "^0.9.4", "tsx": "4.20.5", "typescript": "5.9.2", @@ -61,12 +68,10 @@ "lint:js": "biome check .", "lint": 
"npm-run-all --parallel 'lint:js'", "fix": "npm-run-all --serial 'fix:js'", - "next:update": "next-update --keep true --tldr", "prepack": "rm -f tsconfig.tsbuildinfo tsconfig.build.tsbuildinfo && tsc --build tsconfig.build.json", "test:unit": "vitest run --coverage ./test/unit", - "test:integration": "vitest run ./test/integration", - "test:all": "vitest run --coverage", - "test": "yarn test:unit" + "test:e2e": "vitest run ./test/e2e", + "test": "vitest run --coverage" }, "license": "MIT", "main": "./dist/Transloadit.js", diff --git a/src/ApiError.ts b/src/ApiError.ts index d1ec2362..9719a678 100644 --- a/src/ApiError.ts +++ b/src/ApiError.ts @@ -1,4 +1,5 @@ -import { HTTPError, type RequestError } from 'got' +import type { RequestError } from 'got' +import { HTTPError } from 'got' export interface TransloaditErrorResponseBody { error?: string diff --git a/src/Transloadit.ts b/src/Transloadit.ts index f8a29793..a093675e 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -3,28 +3,23 @@ import { createHmac, randomUUID } from 'node:crypto' import { constants, createReadStream } from 'node:fs' import { access } from 'node:fs/promises' import type { Readable } from 'node:stream' +import { setTimeout as delay } from 'node:timers/promises' import debug from 'debug' import FormData from 'form-data' -import got, { - type Delays, - type Headers, - HTTPError, - type OptionsOfJSONResponseBody, - RequestError, - type RetryOptions, -} from 'got' +import type { Delays, Headers, OptionsOfJSONResponseBody, RetryOptions } from 'got' +import got, { HTTPError, RequestError } from 'got' import intoStream, { type Input as IntoStreamInput } from 'into-stream' import { isReadableStream, isStream } from 'is-stream' import pMap from 'p-map' import packageJson from '../package.json' with { type: 'json' } -import { ApiError, type TransloaditErrorResponseBody } from './ApiError.ts' -import { - type AssemblyIndex, - type AssemblyIndexItem, - type AssemblyStatus, - assemblyIndexSchema, 
- assemblyStatusSchema, +import type { TransloaditErrorResponseBody } from './ApiError.ts' +import { ApiError } from './ApiError.ts' +import type { + AssemblyIndex, + AssemblyIndexItem, + AssemblyStatus, } from './alphalib/types/assemblyStatus.ts' +import { assemblyIndexSchema, assemblyStatusSchema } from './alphalib/types/assemblyStatus.ts' import { zodParseWithContext } from './alphalib/zodParseWithContext.ts' import type { BaseResponse, @@ -50,7 +45,8 @@ import type { import InconsistentResponseError from './InconsistentResponseError.ts' import PaginationStream from './PaginationStream.ts' import PollingTimeoutError from './PollingTimeoutError.ts' -import { type Stream, sendTusRequest } from './tus.ts' +import type { Stream } from './tus.ts' +import { sendTusRequest } from './tus.ts' // See https://github.com/sindresorhus/got/tree/v11.8.6?tab=readme-ov-file#errors // Expose relevant errors @@ -95,6 +91,11 @@ export interface CreateAssemblyOptions { onUploadProgress?: (uploadProgress: UploadProgress) => void onAssemblyProgress?: AssemblyProgress assemblyId?: string + /** + * Optional AbortSignal to cancel the assembly creation and upload. + * When aborted, any in-flight HTTP requests and TUS uploads will be cancelled. + */ + signal?: AbortSignal } export interface AwaitAssemblyCompletionOptions { @@ -102,6 +103,17 @@ export interface AwaitAssemblyCompletionOptions { timeout?: number interval?: number startTimeMs?: number + /** + * Optional AbortSignal to cancel polling. + * When aborted, the polling loop will stop and throw an AbortError. + */ + signal?: AbortSignal + /** + * Optional callback invoked before each poll iteration. + * Return `false` to stop polling early and return the current assembly status. + * Useful for watch mode where a newer job may supersede the current one. 
+ */ + onPoll?: () => boolean | undefined } export interface SmartCDNUrlOptions { @@ -236,6 +248,7 @@ export class Transloadit { files = {}, uploads = {}, assemblyId, + signal, } = opts // Keep track of how long the request took @@ -293,12 +306,18 @@ export class Transloadit { stream.pause() } - // If any stream emits error, we want to handle this and exit with error + // If any stream emits error, we want to handle this and exit with error. + // This promise races against createAssemblyAndUpload() below via Promise.race(). + // When createAssemblyAndUpload wins the race, this promise becomes "orphaned" - + // it's no longer awaited, but stream error handlers remain attached. + // The no-op catch prevents Node's unhandled rejection warning if a stream + // errors after the race is already won. const streamErrorPromise = new Promise((_resolve, reject) => { for (const { stream } of allStreams) { stream.on('error', reject) } }) + streamErrorPromise.catch(() => {}) const createAssemblyAndUpload = async () => { const result: AssemblyStatus = await this._remoteJson({ @@ -309,6 +328,7 @@ export class Transloadit { fields: { tus_num_expected_upload_files: allStreams.length, }, + signal, }) checkResult(result) @@ -319,6 +339,7 @@ export class Transloadit { onProgress: onUploadProgress, requestedChunkSize, uploadConcurrency, + signal, }) } @@ -333,6 +354,7 @@ export class Transloadit { timeout, onAssemblyProgress, startTimeMs, + signal, }) checkResult(awaitResult) return awaitResult @@ -352,12 +374,27 @@ export class Transloadit { timeout, startTimeMs = getHrTimeMs(), interval = 1000, + signal, + onPoll, }: AwaitAssemblyCompletionOptions = {}, ): Promise { assert.ok(assemblyId) + let lastResult: AssemblyStatus | undefined + while (true) { - const result = await this.getAssembly(assemblyId) + // Check if caller wants to stop polling early + if (onPoll?.() === false && lastResult) { + return lastResult + } + + // Check if aborted before making the request + if 
(signal?.aborted) { + throw signal.reason ?? new DOMException('Aborted', 'AbortError') + } + + const result = await this.getAssembly(assemblyId, { signal }) + lastResult = result // If 'ok' is not in result, it implies a terminal state (e.g., error, completed, canceled). // If 'ok' is present, then we check if it's one of the non-terminal polling states. @@ -385,7 +422,21 @@ export class Transloadit { if (timeout != null && nowMs - startTimeMs >= timeout) { throw new PollingTimeoutError('Polling timed out') } - await new Promise((resolve) => setTimeout(resolve, interval)) + + // Make the sleep abortable, ensuring listener cleanup to prevent memory leaks + await new Promise((resolve, reject) => { + const timeoutId = setTimeout(() => { + signal?.removeEventListener('abort', onAbort) + resolve() + }, interval) + + function onAbort() { + clearTimeout(timeoutId) + reject(signal?.reason ?? new DOMException('Aborted', 'AbortError')) + } + + signal?.addEventListener('abort', onAbort, { once: true }) + }) } } @@ -460,7 +511,7 @@ export class Transloadit { assemblyId: string, params: ReplayAssemblyNotificationParams = {}, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/assembly_notifications/${assemblyId}/replay`, method: 'post', ...(Object.keys(params).length > 0 && { params }), @@ -517,11 +568,16 @@ export class Transloadit { * Get an Assembly * * @param assemblyId the Assembly Id + * @param options optional request options * @returns the retrieved Assembly */ - async getAssembly(assemblyId: string): Promise { + async getAssembly( + assemblyId: string, + options?: { signal?: AbortSignal }, + ): Promise { const rawResult = await this._remoteJson, OptionalAuthParams>({ urlSuffix: `/assemblies/${assemblyId}`, + signal: options?.signal, }) const parsedResult = zodParseWithContext(assemblyStatusSchema, rawResult) @@ -545,7 +601,7 @@ export class Transloadit { async createTemplateCredential( params: CreateTemplateCredentialParams, ): 
Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/template_credentials', method: 'post', params: params || {}, @@ -563,7 +619,7 @@ export class Transloadit { credentialId: string, params: CreateTemplateCredentialParams, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/template_credentials/${credentialId}`, method: 'put', params: params || {}, @@ -577,7 +633,7 @@ export class Transloadit { * @returns when the Credential is deleted */ async deleteTemplateCredential(credentialId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/template_credentials/${credentialId}`, method: 'delete', }) @@ -590,7 +646,7 @@ export class Transloadit { * @returns when the Credential is retrieved */ async getTemplateCredential(credentialId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/template_credentials/${credentialId}`, method: 'get', }) @@ -605,7 +661,7 @@ export class Transloadit { async listTemplateCredentials( params?: ListTemplateCredentialsParams, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/template_credentials', method: 'get', params: params || {}, @@ -625,7 +681,7 @@ export class Transloadit { * @returns when the template is created */ async createTemplate(params: CreateTemplateParams): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/templates', method: 'post', params: params || {}, @@ -640,7 +696,7 @@ export class Transloadit { * @returns when the template is edited */ async editTemplate(templateId: string, params: EditTemplateParams): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/templates/${templateId}`, method: 'put', params: params || {}, @@ -654,7 +710,7 @@ export class Transloadit { * @returns when the template is deleted */ async deleteTemplate(templateId: string): Promise { - return 
this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/templates/${templateId}`, method: 'delete', }) @@ -667,7 +723,7 @@ export class Transloadit { * @returns when the template is retrieved */ async getTemplate(templateId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/templates/${templateId}`, method: 'get', }) @@ -682,7 +738,7 @@ export class Transloadit { async listTemplates( params?: ListTemplatesParams, ): Promise> { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/templates', method: 'get', params: params || {}, @@ -702,7 +758,7 @@ export class Transloadit { */ async getBill(month: string): Promise { assert.ok(month, 'month is required') - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/bill/${month}`, method: 'get', }) @@ -799,14 +855,14 @@ export class Transloadit { if (params == null) { params = {} } - if (params['auth'] == null) { - params['auth'] = {} + if (params.auth == null) { + params.auth = {} } - if (params['auth'].key == null) { - params['auth'].key = this._authKey + if (params.auth.key == null) { + params.auth.key = this._authKey } - if (params['auth'].expires == null) { - params['auth'].expires = this._getExpiresDate() + if (params.auth.expires == null) { + params.auth.expires = this._getExpiresDate() } return JSON.stringify(params) @@ -830,6 +886,7 @@ export class Transloadit { params?: TParams fields?: Fields headers?: Headers + signal?: AbortSignal }): Promise { const { urlSuffix, @@ -839,6 +896,7 @@ export class Transloadit { params = {}, fields, headers, + signal, } = opts // Allow providing either a `urlSuffix` or a full `url` @@ -871,6 +929,7 @@ export class Transloadit { ...headers, }, responseType: 'json', + signal, } try { @@ -904,7 +963,7 @@ export class Transloadit { const { retryIn: retryInSec } = body.info logWarn(`Rate limit reached, retrying request in approximately ${retryInSec} seconds.`) const retryInMs = 1000 * 
(retryInSec * (1 + 0.1 * Math.random())) - await new Promise((resolve) => setTimeout(resolve, retryInMs)) + await delay(retryInMs) // Retry } else { throw new ApiError({ diff --git a/src/apiTypes.ts b/src/apiTypes.ts index a5f28b1c..28d474c8 100644 --- a/src/apiTypes.ts +++ b/src/apiTypes.ts @@ -44,7 +44,7 @@ export type ListAssembliesParams = OptionalAuthParams & { export type ReplayAssemblyParams = Pick< CreateAssemblyParams, - 'auth' | 'template_id' | 'notify_url' | 'fields' + 'auth' | 'template_id' | 'notify_url' | 'fields' | 'steps' > & { reparse_template?: number } diff --git a/src/cli.ts b/src/cli.ts index dc084ed8..bdcd0b93 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -4,310 +4,8 @@ import { realpathSync } from 'node:fs' import path from 'node:path' import process from 'node:process' import { fileURLToPath } from 'node:url' -import { type ZodIssue, z } from 'zod' -import { - assemblyAuthInstructionsSchema, - assemblyInstructionsSchema, -} from './alphalib/types/template.ts' -import type { OptionalAuthParams } from './apiTypes.ts' -import { Transloadit } from './Transloadit.ts' - -type UrlParamPrimitive = string | number | boolean -type UrlParamArray = UrlParamPrimitive[] -type NormalizedUrlParams = Record - -interface RunSigOptions { - providedInput?: string - algorithm?: string -} - -interface RunSmartSigOptions { - providedInput?: string -} - -const smartCdnParamsSchema = z - .object({ - workspace: z.string().min(1, 'workspace is required'), - template: z.string().min(1, 'template is required'), - input: z.union([z.string(), z.number(), z.boolean()]), - url_params: z.record(z.unknown()).optional(), - expire_at_ms: z.union([z.number(), z.string()]).optional(), - }) - .passthrough() - -const cliSignatureParamsSchema = assemblyInstructionsSchema - .extend({ auth: assemblyAuthInstructionsSchema.partial().optional() }) - .partial() - .passthrough() - -export async function readStdin(): Promise { - if (process.stdin.isTTY) return '' - - 
process.stdin.setEncoding('utf8') - let data = '' - - for await (const chunk of process.stdin) { - data += chunk - } - - return data -} - -function fail(message: string): void { - console.error(message) - process.exitCode = 1 -} - -function formatIssues(issues: ZodIssue[]): string { - return issues - .map((issue) => { - const path = issue.path.join('.') || '(root)' - return `${path}: ${issue.message}` - }) - .join('; ') -} - -function normalizeUrlParam(value: unknown): UrlParamPrimitive | UrlParamArray | undefined { - if (value == null) return undefined - if (Array.isArray(value)) { - const normalized = value.filter( - (item): item is UrlParamPrimitive => - typeof item === 'string' || typeof item === 'number' || typeof item === 'boolean', - ) - return normalized.length > 0 ? normalized : undefined - } - if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { - return value - } - return undefined -} - -function normalizeUrlParams(params?: Record): NormalizedUrlParams | undefined { - if (params == null) return undefined - let normalized: NormalizedUrlParams | undefined - for (const [key, value] of Object.entries(params)) { - const normalizedValue = normalizeUrlParam(value) - if (normalizedValue === undefined) continue - if (normalized == null) normalized = {} - normalized[key] = normalizedValue - } - return normalized -} - -function ensureCredentials(): { authKey: string; authSecret: string } | null { - const authKey = process.env.TRANSLOADIT_KEY || process.env.TRANSLOADIT_AUTH_KEY - const authSecret = process.env.TRANSLOADIT_SECRET || process.env.TRANSLOADIT_AUTH_SECRET - - if (!authKey || !authSecret) { - fail( - 'Missing credentials. 
Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', - ) - return null - } - - return { authKey, authSecret } -} - -export async function runSig(options: RunSigOptions = {}): Promise { - const credentials = ensureCredentials() - if (credentials == null) return - const { authKey, authSecret } = credentials - const { providedInput, algorithm } = options - - const rawInput = providedInput ?? (await readStdin()) - const input = rawInput.trim() - let params: Record - - if (input === '') { - params = { auth: { key: authKey } } - } else { - let parsed: unknown - try { - parsed = JSON.parse(input) - } catch (error) { - fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return - } - - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - fail('Invalid params provided via stdin. Expected a JSON object.') - return - } - - const parsedResult = cliSignatureParamsSchema.safeParse(parsed) - if (!parsedResult.success) { - fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return - } - - const parsedParams = parsedResult.data as Record - const existingAuth = - typeof parsedParams.auth === 'object' && - parsedParams.auth != null && - !Array.isArray(parsedParams.auth) - ? (parsedParams.auth as Record) - : {} - - params = { - ...parsedParams, - auth: { - ...existingAuth, - key: authKey, - }, - } - } - - const client = new Transloadit({ authKey, authSecret }) - try { - const signature = client.calcSignature(params as OptionalAuthParams, algorithm) - process.stdout.write(`${JSON.stringify(signature)}\n`) - } catch (error) { - fail(`Failed to generate signature: ${(error as Error).message}`) - } -} - -export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { - const credentials = ensureCredentials() - if (credentials == null) return - const { authKey, authSecret } = credentials - - const rawInput = options.providedInput ?? 
(await readStdin()) - const input = rawInput.trim() - if (input === '') { - fail( - 'Missing params provided via stdin. Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', - ) - return - } - - let parsed: unknown - try { - parsed = JSON.parse(input) - } catch (error) { - fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return - } - - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - fail('Invalid params provided via stdin. Expected a JSON object.') - return - } - - const parsedResult = smartCdnParamsSchema.safeParse(parsed) - if (!parsedResult.success) { - fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return - } - - const { workspace, template, input: inputFieldRaw, url_params, expire_at_ms } = parsedResult.data - const urlParams = normalizeUrlParams(url_params as Record | undefined) - - let expiresAt: number | undefined - if (typeof expire_at_ms === 'string') { - const parsedNumber = Number.parseInt(expire_at_ms, 10) - if (Number.isNaN(parsedNumber)) { - fail('Invalid params: expire_at_ms must be a number.') - return - } - expiresAt = parsedNumber - } else { - expiresAt = expire_at_ms - } - - const inputField = typeof inputFieldRaw === 'string' ? 
inputFieldRaw : String(inputFieldRaw) - - const client = new Transloadit({ authKey, authSecret }) - try { - const signedUrl = client.getSignedSmartCDNUrl({ - workspace, - template, - input: inputField, - urlParams, - expiresAt, - }) - process.stdout.write(`${signedUrl}\n`) - } catch (error) { - fail(`Failed to generate Smart CDN URL: ${(error as Error).message}`) - } -} - -function parseSigArguments(args: string[]): { algorithm?: string } { - let algorithm: string | undefined - let index = 0 - while (index < args.length) { - const arg = args[index] - if (arg === '--algorithm' || arg === '-a') { - const next = args[index + 1] - if (next == null || next.startsWith('-')) { - throw new Error('Missing value for --algorithm option') - } - algorithm = next - index += 2 - continue - } - if (arg.startsWith('--algorithm=')) { - const [, value] = arg.split('=', 2) - if (value === undefined || value === '') { - throw new Error('Missing value for --algorithm option') - } - algorithm = value - index += 1 - continue - } - throw new Error(`Unknown option: ${arg}`) - } - - return { algorithm } -} - -export async function main(args = process.argv.slice(2)): Promise { - const [command, ...commandArgs] = args - - switch (command) { - case 'smart_sig': { - await runSmartSig() - break - } - - case 'sig': { - try { - const { algorithm } = parseSigArguments(commandArgs) - await runSig({ algorithm }) - } catch (error) { - fail((error as Error).message) - } - break - } - - case '-h': - case '--help': - case undefined: { - process.stdout.write( - [ - 'Usage:', - ' npx transloadit smart_sig Read Smart CDN params JSON from stdin and output a signed URL.', - ' npx transloadit sig [--algorithm ] Read params JSON from stdin and output signed payload JSON.', - '', - 'Required JSON fields:', - ' smart_sig: workspace, template, input', - ' sig: none (object is optional)', - 'Optional JSON fields:', - ' smart_sig: expire_at_ms, url_params', - ' sig: auth.expires and any supported assembly params', - 
'', - 'Environment variables:', - ' TRANSLOADIT_KEY, TRANSLOADIT_SECRET', - ].join('\n'), - ) - if (command === undefined) process.exitCode = 1 - break - } - - default: { - fail(`Unknown command: ${command}`) - } - } -} +import 'dotenv/config' +import { createCli } from './cli/commands/index.ts' const currentFile = realpathSync(fileURLToPath(import.meta.url)) @@ -326,11 +24,20 @@ export function shouldRunCli(invoked?: string): boolean { return resolved === currentFile } +export async function main(args = process.argv.slice(2)): Promise { + const cli = createCli() + const exitCode = await cli.run(args) + if (exitCode !== 0) { + process.exitCode = exitCode + } +} + export function runCliWhenExecuted(): void { if (!shouldRunCli(process.argv[1])) return void main().catch((error) => { - fail((error as Error).message) + console.error((error as Error).message) + process.exitCode = 1 }) } diff --git a/src/cli/OutputCtl.ts b/src/cli/OutputCtl.ts new file mode 100644 index 00000000..144e4a4c --- /dev/null +++ b/src/cli/OutputCtl.ts @@ -0,0 +1,115 @@ +/** + * Log levels following syslog severity (https://en.wikipedia.org/wiki/Syslog#Severity_level) + * Lower numbers = more severe, higher numbers = more verbose + */ +export const LOG_LEVEL = { + ERR: 3, // Error conditions + WARN: 4, // Warning conditions + NOTICE: 5, // Normal but significant (default) + INFO: 6, // Informational + DEBUG: 7, // Debug-level messages + TRACE: 8, // Most verbose/detailed +} as const + +export type LogLevelName = keyof typeof LOG_LEVEL +export type LogLevelValue = (typeof LOG_LEVEL)[LogLevelName] + +export const LOG_LEVEL_DEFAULT: LogLevelValue = LOG_LEVEL.NOTICE + +/** Valid log level names for CLI parsing */ +export const LOG_LEVEL_NAMES = Object.keys(LOG_LEVEL).map((k) => + k.toLowerCase(), +) as Lowercase[] + +/** Valid numeric log level values */ +const LOG_LEVEL_VALUES = new Set(Object.values(LOG_LEVEL)) + +/** Parse a log level string (name or number) to its numeric value */ +export 
function parseLogLevel(level: string): LogLevelValue { + // Try parsing as number first + const num = Number(level) + if (!Number.isNaN(num)) { + if (LOG_LEVEL_VALUES.has(num as LogLevelValue)) { + return num as LogLevelValue + } + throw new Error( + `Invalid log level: ${level}. Valid values: ${[...LOG_LEVEL_VALUES].join(', ')} or ${LOG_LEVEL_NAMES.join(', ')}`, + ) + } + + // Try as level name + const upper = level.toUpperCase() as LogLevelName + if (upper in LOG_LEVEL) { + return LOG_LEVEL[upper] + } + throw new Error( + `Invalid log level: ${level}. Valid levels: ${LOG_LEVEL_NAMES.join(', ')} or ${[...LOG_LEVEL_VALUES].join(', ')}`, + ) +} + +export interface OutputCtlOptions { + logLevel?: LogLevelValue + jsonMode?: boolean +} + +/** Interface for output controllers (used to allow test mocks) */ +export interface IOutputCtl { + error(msg: unknown): void + warn(msg: unknown): void + notice(msg: unknown): void + info(msg: unknown): void + debug(msg: unknown): void + trace(msg: unknown): void + print(simple: unknown, json: unknown): void +} + +export default class OutputCtl implements IOutputCtl { + private json: boolean + private logLevel: LogLevelValue + + constructor({ logLevel = LOG_LEVEL_DEFAULT, jsonMode = false }: OutputCtlOptions = {}) { + this.json = jsonMode + this.logLevel = logLevel + + process.stdout.on('error', (err: NodeJS.ErrnoException) => { + if (err.code === 'EPIPE') { + process.exitCode = 0 + } + }) + process.stderr.on('error', (err: NodeJS.ErrnoException) => { + if (err.code === 'EPIPE') { + process.exitCode = 0 + } + }) + } + + error(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.ERR) console.error('err ', msg) + } + + warn(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.WARN) console.error('warn ', msg) + } + + notice(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.NOTICE) console.error('notice ', msg) + } + + info(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.INFO) console.error('info ', msg) + } + + 
debug(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.DEBUG) console.error('debug ', msg) + } + + trace(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.TRACE) console.error('trace ', msg) + } + + print(simple: unknown, json: unknown): void { + if (this.json) console.log(JSON.stringify(json)) + else if (typeof simple === 'string') console.log(simple) + else console.dir(simple, { depth: null }) + } +} diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts new file mode 100644 index 00000000..ee287239 --- /dev/null +++ b/src/cli/commands/BaseCommand.ts @@ -0,0 +1,73 @@ +import process from 'node:process' +import { Command, Option } from 'clipanion' +import 'dotenv/config' +import { Transloadit as TransloaditClient } from '../../Transloadit.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import OutputCtl, { LOG_LEVEL_DEFAULT, LOG_LEVEL_NAMES, parseLogLevel } from '../OutputCtl.ts' + +export abstract class BaseCommand extends Command { + logLevelOption = Option.String('-l,--log-level', { + description: `Log level: ${LOG_LEVEL_NAMES.join(', ')} or 3-8 (default: notice)`, + }) + + json = Option.Boolean('-j,--json', false, { + description: 'Output in JSON format', + }) + + endpoint = Option.String('--endpoint', { + description: + 'API endpoint URL (default: https://api2.transloadit.com, or TRANSLOADIT_ENDPOINT env var)', + }) + + protected output!: IOutputCtl + protected client!: TransloaditClient + + protected setupOutput(): void { + const logLevel = this.logLevelOption ? 
parseLogLevel(this.logLevelOption) : LOG_LEVEL_DEFAULT + this.output = new OutputCtl({ + logLevel, + jsonMode: this.json, + }) + } + + protected setupClient(): boolean { + if (!process.env.TRANSLOADIT_KEY || !process.env.TRANSLOADIT_SECRET) { + this.output.error( + 'Please provide API authentication in the environment variables TRANSLOADIT_KEY and TRANSLOADIT_SECRET', + ) + return false + } + + const endpoint = this.endpoint || process.env.TRANSLOADIT_ENDPOINT + + this.client = new TransloaditClient({ + authKey: process.env.TRANSLOADIT_KEY, + authSecret: process.env.TRANSLOADIT_SECRET, + ...(endpoint && { endpoint }), + }) + return true + } + + abstract override execute(): Promise +} + +export abstract class AuthenticatedCommand extends BaseCommand { + override async execute(): Promise { + this.setupOutput() + if (!this.setupClient()) { + return 1 + } + return await this.run() + } + + protected abstract run(): Promise +} + +export abstract class UnauthenticatedCommand extends BaseCommand { + override async execute(): Promise { + this.setupOutput() + return await this.run() + } + + protected abstract run(): Promise +} diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts new file mode 100644 index 00000000..bab5bbc2 --- /dev/null +++ b/src/cli/commands/assemblies.ts @@ -0,0 +1,1373 @@ +import EventEmitter from 'node:events' +import fs from 'node:fs' +import fsp from 'node:fs/promises' +import path from 'node:path' +import process from 'node:process' +import type { Readable, Writable } from 'node:stream' +import { pipeline } from 'node:stream/promises' +import { setTimeout as delay } from 'node:timers/promises' +import tty from 'node:tty' +import { promisify } from 'node:util' +import { Command, Option } from 'clipanion' +import got from 'got' +import PQueue from 'p-queue' +import * as t from 'typanion' +import { z } from 'zod' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Steps, StepsInput } from 
'../../alphalib/types/template.ts' +import { stepsSchema } from '../../alphalib/types/template.ts' +import type { CreateAssemblyParams, ReplayAssemblyParams } from '../../apiTypes.ts' +import type { CreateAssemblyOptions, Transloadit } from '../../Transloadit.ts' +import { createReadStream, formatAPIError, streamToBuffer } from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import { ensureError, isErrnoException } from '../types.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +// --- From assemblies.ts: Schemas and interfaces --- +export interface AssemblyListOptions { + before?: string + after?: string + fields?: string[] + keywords?: string[] + pagesize?: number +} + +export interface AssemblyGetOptions { + assemblies: string[] +} + +export interface AssemblyDeleteOptions { + assemblies: string[] +} + +export interface AssemblyReplayOptions { + fields?: Record + reparse?: boolean + steps?: string + notify_url?: string + assemblies: string[] +} + +const AssemblySchema = z.object({ + id: z.string(), +}) + +// --- Business logic functions (from assemblies.ts) --- + +export function list( + output: IOutputCtl, + client: Transloadit, + { before, after, fields, keywords }: AssemblyListOptions, +): Promise { + const assemblies = client.streamAssemblies({ + fromdate: after, + todate: before, + keywords, + }) + + assemblies.on('readable', () => { + const assembly: unknown = assemblies.read() + if (assembly == null) return + + const parsed = AssemblySchema.safeParse(assembly) + if (!parsed.success) return + + if (fields == null) { + output.print(parsed.data.id, assembly) + } else { + const assemblyRecord = assembly as Record + output.print(fields.map((field) => assemblyRecord[field]).join(' '), assembly) + } + }) + + return new Promise((resolve) => { + assemblies.on('end', resolve) + assemblies.on('error', (err: unknown) => { + output.error(formatAPIError(err)) + resolve() + }) + }) +} + +export async function get( + output: IOutputCtl, 
+ client: Transloadit, + { assemblies }: AssemblyGetOptions, +): Promise { + for (const assembly of assemblies) { + await delay(1000) + const [err, result] = await tryCatch(client.getAssembly(assembly)) + if (err) { + output.error(formatAPIError(err)) + throw ensureError(err) + } + output.print(result, result) + } +} + +async function deleteAssemblies( + output: IOutputCtl, + client: Transloadit, + { assemblies }: AssemblyDeleteOptions, +): Promise { + const promises = assemblies.map(async (assembly) => { + const [err] = await tryCatch(client.cancelAssembly(assembly)) + if (err) { + output.error(formatAPIError(err)) + } + }) + await Promise.all(promises) +} + +// Export with `delete` alias for tests (can't use `delete` as function name) +export { deleteAssemblies as delete } + +export async function replay( + output: IOutputCtl, + client: Transloadit, + { fields, reparse, steps, notify_url, assemblies }: AssemblyReplayOptions, +): Promise { + if (steps) { + try { + const buf = await streamToBuffer(createReadStream(steps)) + const parsed: unknown = JSON.parse(buf.toString()) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid steps format: ${validated.error.message}`) + } + await apiCall(validated.data) + } catch (err) { + const error = ensureError(err) + output.error(error.message) + } + } else { + await apiCall() + } + + async function apiCall(stepsOverride?: Steps): Promise { + const promises = assemblies.map(async (assembly) => { + const [err] = await tryCatch( + client.replayAssembly(assembly, { + reparse_template: reparse ? 
1 : 0, + fields, + notify_url, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + steps: stepsOverride as ReplayAssemblyParams['steps'], + }), + ) + if (err) { + output.error(formatAPIError(err)) + } + }) + await Promise.all(promises) + } +} + +// --- From assemblies-create.ts: Helper classes and functions --- +interface NodeWatcher { + on(event: 'error', listener: (err: Error) => void): void + on(event: 'close', listener: () => void): void + on(event: 'change', listener: (evt: string, filename: string) => void): void + on(event: string, listener: (...args: unknown[]) => void): void + close(): void +} + +type NodeWatchFn = (path: string, options?: { recursive?: boolean }) => NodeWatcher + +let nodeWatch: NodeWatchFn | undefined + +async function getNodeWatch(): Promise { + if (!nodeWatch) { + const mod = (await import('node-watch')) as unknown as { default: NodeWatchFn } + nodeWatch = mod.default + } + return nodeWatch +} + +// workaround for determining mime-type of stdin +const stdinWithPath = process.stdin as unknown as { path: string } +stdinWithPath.path = '/dev/stdin' + +interface OutStream extends Writable { + path?: string + mtime?: Date +} + +interface Job { + in: Readable | null + out: OutStream | null +} + +type OutstreamProvider = (inpath: string | null, indir?: string) => Promise + +interface StreamRegistry { + [key: string]: OutStream | undefined +} + +interface JobEmitterOptions { + recursive?: boolean + outstreamProvider: OutstreamProvider + streamRegistry: StreamRegistry + watch?: boolean + reprocessStale?: boolean +} + +interface ReaddirJobEmitterOptions { + dir: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider + topdir?: string +} + +interface SingleJobEmitterOptions { + file: string + streamRegistry: StreamRegistry + outstreamProvider: OutstreamProvider +} + +interface WatchJobEmitterOptions { + file: string + streamRegistry: StreamRegistry + recursive?: boolean + 
outstreamProvider: OutstreamProvider +} + +interface StatLike { + isDirectory(): boolean +} + +const fstatAsync = promisify(fs.fstat) + +async function myStat( + stdioStream: NodeJS.ReadStream | NodeJS.WriteStream, + filepath: string, +): Promise { + if (filepath === '-') { + const stream = stdioStream as NodeJS.ReadStream & { fd: number } + return await fstatAsync(stream.fd) + } + return await fsp.stat(filepath) +} + +function dirProvider(output: string): OutstreamProvider { + return async (inpath, indir = process.cwd()) => { + if (inpath == null || inpath === '-') { + throw new Error('You must provide an input to output to a directory') + } + + let relpath = path.relative(indir, inpath) + relpath = relpath.replace(/^(\.\.\/)+/, '') + const outpath = path.join(output, relpath) + const outdir = path.dirname(outpath) + + await fsp.mkdir(outdir, { recursive: true }) + const [, stats] = await tryCatch(fsp.stat(outpath)) + const mtime = stats?.mtime ?? new Date(0) + const outstream = fs.createWriteStream(outpath) as OutStream + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + outstream.on('error', () => {}) + outstream.mtime = mtime + return outstream + } +} + +function fileProvider(output: string): OutstreamProvider { + const dirExistsP = fsp.mkdir(path.dirname(output), { recursive: true }) + return async (_inpath) => { + await dirExistsP + if (output === '-') return process.stdout as OutStream + + const [, stats] = await tryCatch(fsp.stat(output)) + const mtime = stats?.mtime ?? 
new Date(0) + const outstream = fs.createWriteStream(output) as OutStream + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + outstream.on('error', () => {}) + outstream.mtime = mtime + return outstream + } +} + +function nullProvider(): OutstreamProvider { + return async (_inpath) => null +} + +class MyEventEmitter extends EventEmitter { + protected hasEnded: boolean + + constructor() { + super() + this.hasEnded = false + } + + override emit(event: string | symbol, ...args: unknown[]): boolean { + if (this.hasEnded) return false + if (event === 'end' || event === 'error') { + this.hasEnded = true + return super.emit(event, ...args) + } + return super.emit(event, ...args) + } +} + +class ReaddirJobEmitter extends MyEventEmitter { + constructor({ + dir, + streamRegistry, + recursive, + outstreamProvider, + topdir = dir, + }: ReaddirJobEmitterOptions) { + super() + + process.nextTick(() => { + this.processDirectory({ dir, streamRegistry, recursive, outstreamProvider, topdir }).catch( + (err) => { + this.emit('error', err) + }, + ) + }) + } + + private async processDirectory({ + dir, + streamRegistry, + recursive, + outstreamProvider, + topdir, + }: ReaddirJobEmitterOptions & { topdir: string }): Promise { + const files = await fsp.readdir(dir) + + const pendingOperations: Promise[] = [] + + for (const filename of files) { + const file = path.normalize(path.join(dir, filename)) + pendingOperations.push( + this.processFile({ file, streamRegistry, recursive, outstreamProvider, topdir }), + ) + } + + await Promise.all(pendingOperations) + this.emit('end') + } + + private async processFile({ + file, + streamRegistry, + recursive = false, + outstreamProvider, + topdir, + }: { + file: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider + topdir: string + }): Promise { + const stats = await fsp.stat(file) + + if 
(stats.isDirectory()) { + if (recursive) { + await new Promise((resolve, reject) => { + const subdirEmitter = new ReaddirJobEmitter({ + dir: file, + streamRegistry, + recursive, + outstreamProvider, + topdir, + }) + subdirEmitter.on('job', (job: Job) => this.emit('job', job)) + subdirEmitter.on('error', (error: Error) => reject(error)) + subdirEmitter.on('end', () => resolve()) + }) + } + } else { + const existing = streamRegistry[file] + if (existing) existing.end() + const outstream = await outstreamProvider(file, topdir) + streamRegistry[file] = outstream ?? undefined + const instream = fs.createReadStream(file) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + this.emit('job', { in: instream, out: outstream }) + } + } +} + +class SingleJobEmitter extends MyEventEmitter { + constructor({ file, streamRegistry, outstreamProvider }: SingleJobEmitterOptions) { + super() + + const normalizedFile = path.normalize(file) + const existing = streamRegistry[normalizedFile] + if (existing) existing.end() + outstreamProvider(normalizedFile).then((outstream) => { + streamRegistry[normalizedFile] = outstream ?? 
undefined + + let instream: Readable | null + if (normalizedFile === '-') { + if (tty.isatty(process.stdin.fd)) { + instream = null + } else { + instream = process.stdin + } + } else { + instream = fs.createReadStream(normalizedFile) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + } + + process.nextTick(() => { + this.emit('job', { in: instream, out: outstream }) + this.emit('end') + }) + }) + } +} + +class InputlessJobEmitter extends MyEventEmitter { + constructor({ + outstreamProvider, + }: { streamRegistry: StreamRegistry; outstreamProvider: OutstreamProvider }) { + super() + + process.nextTick(() => { + outstreamProvider(null).then((outstream) => { + try { + this.emit('job', { in: null, out: outstream }) + } catch (err) { + this.emit('error', err) + } + + this.emit('end') + }) + }) + } +} + +class NullJobEmitter extends MyEventEmitter { + constructor() { + super() + process.nextTick(() => this.emit('end')) + } +} + +class WatchJobEmitter extends MyEventEmitter { + private watcher: NodeWatcher | null = null + + constructor({ file, streamRegistry, recursive, outstreamProvider }: WatchJobEmitterOptions) { + super() + + this.init({ file, streamRegistry, recursive, outstreamProvider }).catch((err) => { + this.emit('error', err) + }) + + // Clean up watcher on process exit signals + const cleanup = () => this.close() + process.once('SIGINT', cleanup) + process.once('SIGTERM', cleanup) + } + + /** Close the file watcher and release resources */ + close(): void { + if (this.watcher) { + this.watcher.close() + this.watcher = null + } + } + + private async init({ + file, + streamRegistry, + recursive, + outstreamProvider, + }: WatchJobEmitterOptions): Promise { + const stats = await fsp.stat(file) + const topdir = stats.isDirectory() ? 
file : undefined + + const watchFn = await getNodeWatch() + this.watcher = watchFn(file, { recursive }) + + this.watcher.on('error', (err: Error) => { + this.close() + this.emit('error', err) + }) + this.watcher.on('close', () => this.emit('end')) + this.watcher.on('change', (_evt: string, filename: string) => { + const normalizedFile = path.normalize(filename) + this.handleChange(normalizedFile, topdir, streamRegistry, outstreamProvider).catch((err) => { + this.emit('error', err) + }) + }) + } + + private async handleChange( + normalizedFile: string, + topdir: string | undefined, + streamRegistry: StreamRegistry, + outstreamProvider: OutstreamProvider, + ): Promise { + const stats = await fsp.stat(normalizedFile) + if (stats.isDirectory()) return + + const existing = streamRegistry[normalizedFile] + if (existing) existing.end() + + const outstream = await outstreamProvider(normalizedFile, topdir) + streamRegistry[normalizedFile] = outstream ?? undefined + + const instream = fs.createReadStream(normalizedFile) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + this.emit('job', { in: instream, out: outstream }) + } +} + +class MergedJobEmitter extends MyEventEmitter { + constructor(...jobEmitters: MyEventEmitter[]) { + super() + + let ncomplete = 0 + + for (const jobEmitter of jobEmitters) { + jobEmitter.on('error', (err: Error) => this.emit('error', err)) + jobEmitter.on('job', (job: Job) => this.emit('job', job)) + jobEmitter.on('end', () => { + if (++ncomplete === jobEmitters.length) this.emit('end') + }) + } + + if (jobEmitters.length === 0) { + this.emit('end') + } + } +} + +class ConcattedJobEmitter extends MyEventEmitter { + constructor(emitterFn: () => MyEventEmitter, ...emitterFns: (() => MyEventEmitter)[]) { + super() + + const emitter = emitterFn() + + emitter.on('error', (err: Error) => this.emit('error', err)) + 
emitter.on('job', (job: Job) => this.emit('job', job)) + + if (emitterFns.length === 0) { + emitter.on('end', () => this.emit('end')) + } else { + emitter.on('end', () => { + const firstFn = emitterFns[0] + if (!firstFn) { + this.emit('end') + return + } + const restEmitter = new ConcattedJobEmitter(firstFn, ...emitterFns.slice(1)) + restEmitter.on('error', (err: Error) => this.emit('error', err)) + restEmitter.on('job', (job: Job) => this.emit('job', job)) + restEmitter.on('end', () => this.emit('end')) + }) + } + } +} + +function detectConflicts(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + const outfileAssociations: Record = {} + + jobEmitter.on('end', () => emitter.emit('end')) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => { + if (job.in == null || job.out == null) { + emitter.emit('job', job) + return + } + const inPath = (job.in as fs.ReadStream).path as string + const outPath = job.out.path as string + if (Object.hasOwn(outfileAssociations, outPath) && outfileAssociations[outPath] !== inPath) { + emitter.emit( + 'error', + new Error(`Output collision between '${inPath}' and '${outfileAssociations[outPath]}'`), + ) + } else { + outfileAssociations[outPath] = inPath + emitter.emit('job', job) + } + }) + + return emitter +} + +function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + const pendingChecks: Promise[] = [] + + jobEmitter.on('end', () => Promise.all(pendingChecks).then(() => emitter.emit('end'))) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => { + if (job.in == null || job.out == null) { + emitter.emit('job', job) + return + } + + const inPath = (job.in as fs.ReadStream).path as string + const checkPromise = fsp + .stat(inPath) + .then((stats) => { + const inM = stats.mtime + const outM = job.out?.mtime ?? 
new Date(0) + + if (outM <= inM) emitter.emit('job', job) + }) + .catch(() => { + emitter.emit('job', job) + }) + pendingChecks.push(checkPromise) + }) + + return emitter +} + +function makeJobEmitter( + inputs: string[], + { + recursive, + outstreamProvider, + streamRegistry, + watch: watchOption, + reprocessStale, + }: JobEmitterOptions, +): MyEventEmitter { + const emitter = new EventEmitter() + + const emitterFns: (() => MyEventEmitter)[] = [] + const watcherFns: (() => MyEventEmitter)[] = [] + + async function processInputs(): Promise { + for (const input of inputs) { + if (input === '-') { + emitterFns.push( + () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), + ) + watcherFns.push(() => new NullJobEmitter()) + } else { + const stats = await fsp.stat(input) + if (stats.isDirectory()) { + emitterFns.push( + () => + new ReaddirJobEmitter({ dir: input, recursive, outstreamProvider, streamRegistry }), + ) + watcherFns.push( + () => + new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + ) + } else { + emitterFns.push( + () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), + ) + watcherFns.push( + () => + new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + ) + } + } + } + + if (inputs.length === 0) { + emitterFns.push(() => new InputlessJobEmitter({ outstreamProvider, streamRegistry })) + } + + startEmitting() + } + + function startEmitting(): void { + let source: MyEventEmitter = new MergedJobEmitter(...emitterFns.map((f) => f())) + + if (watchOption) { + source = new ConcattedJobEmitter( + () => source, + () => new MergedJobEmitter(...watcherFns.map((f) => f())), + ) + } + + source.on('job', (job: Job) => emitter.emit('job', job)) + source.on('error', (err: Error) => emitter.emit('error', err)) + source.on('end', () => emitter.emit('end')) + } + + processInputs().catch((err) => { + emitter.emit('error', err) + }) + + const stalefilter = 
reprocessStale ? (x: EventEmitter) => x as MyEventEmitter : dismissStaleJobs + return stalefilter(detectConflicts(emitter)) +} + +export interface AssembliesCreateOptions { + steps?: string + template?: string + fields?: Record + watch?: boolean + recursive?: boolean + inputs: string[] + output?: string | null + del?: boolean + reprocessStale?: boolean + singleAssembly?: boolean + concurrency?: number +} + +const DEFAULT_CONCURRENCY = 5 + +// --- Main assembly create function --- +export async function create( + outputctl: IOutputCtl, + client: Transloadit, + { + steps, + template, + fields, + watch: watchOption, + recursive, + inputs, + output, + del, + reprocessStale, + singleAssembly, + concurrency = DEFAULT_CONCURRENCY, + }: AssembliesCreateOptions, +): Promise<{ results: unknown[]; hasFailures: boolean }> { + // Quick fix for https://github.com/transloadit/transloadify/issues/13 + // Only default to stdout when output is undefined (not provided), not when explicitly null + let resolvedOutput = output + if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' + + // Read steps file async before entering the Promise constructor + // We use StepsInput (the input type) rather than Steps (the transformed output type) + // to avoid zod adding default values that the API may reject + let stepsData: StepsInput | undefined + if (steps) { + const stepsContent = await fsp.readFile(steps, 'utf8') + const parsed: unknown = JSON.parse(stepsContent) + // Basic structural validation: must be an object with step names as keys + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + throw new Error('Invalid steps format: expected an object with step names as keys') + } + // Validate each step has a robot field + for (const [stepName, step] of Object.entries(parsed)) { + if (step == null || typeof step !== 'object' || Array.isArray(step)) { + throw new Error(`Invalid steps format: step '${stepName}' must be an object`) + } + if 
(!('robot' in step) || typeof (step as Record).robot !== 'string') { + throw new Error( + `Invalid steps format: step '${stepName}' must have a 'robot' string property`, + ) + } + } + stepsData = parsed as StepsInput + } + + // Determine output stat async before entering the Promise constructor + let outstat: StatLike | undefined + if (resolvedOutput != null) { + const [err, stat] = await tryCatch(myStat(process.stdout, resolvedOutput)) + if (err && (!isErrnoException(err) || err.code !== 'ENOENT')) throw err + outstat = stat ?? { isDirectory: () => false } + + if (!outstat.isDirectory() && inputs.length !== 0) { + const firstInput = inputs[0] + if (firstInput) { + const firstInputStat = await myStat(process.stdin, firstInput) + if (inputs.length > 1 || firstInputStat.isDirectory()) { + const msg = 'Output must be a directory when specifying multiple inputs' + outputctl.error(msg) + throw new Error(msg) + } + } + } + } + + return new Promise((resolve, reject) => { + const params: CreateAssemblyParams = ( + stepsData ? { steps: stepsData as CreateAssemblyParams['steps'] } : { template_id: template } + ) as CreateAssemblyParams + if (fields) { + params.fields = fields + } + + const outstreamProvider: OutstreamProvider = + resolvedOutput == null + ? nullProvider() + : outstat?.isDirectory() + ? 
dirProvider(resolvedOutput) + : fileProvider(resolvedOutput) + const streamRegistry: StreamRegistry = {} + + const emitter = makeJobEmitter(inputs, { + recursive, + watch: watchOption, + outstreamProvider, + streamRegistry, + reprocessStale, + }) + + // Use p-queue for concurrency management + const queue = new PQueue({ concurrency }) + const results: unknown[] = [] + let hasFailures = false + // AbortController to cancel all in-flight createAssembly calls when an error occurs + const abortController = new AbortController() + + // Helper to process a single assembly job + async function processAssemblyJob( + inPath: string | null, + outPath: string | null, + outMtime: Date | undefined, + ): Promise { + outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + // Create fresh streams for this job + const inStream = inPath ? fs.createReadStream(inPath) : null + inStream?.on('error', () => {}) + const outStream = outPath ? (fs.createWriteStream(outPath) as OutStream) : null + outStream?.on('error', () => {}) + if (outStream) outStream.mtime = outMtime + + let superceded = false + if (outStream != null) { + outStream.on('finish', () => { + superceded = true + }) + } + + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (inStream != null) { + createOptions.uploads = { in: inStream } + } + + const result = await client.createAssembly(createOptions) + if (superceded) return undefined + + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const assembly = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onPoll: () => { + if (superceded) return false + return true + }, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) + + if (superceded) return undefined + + if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: 
${assembly.error || assembly.message} (Status: ${assembly.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + if (!assembly.results) throw new Error('No results in assembly') + const resultsKeys = Object.keys(assembly.results) + const firstKey = resultsKeys[0] + if (!firstKey) throw new Error('No results in assembly') + const firstResult = assembly.results[firstKey] + if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') + const resulturl = firstResult[0].url + + if (outStream != null && resulturl && !superceded) { + outputctl.debug('DOWNLOADING') + const [dlErr] = await tryCatch( + pipeline(got.stream(resulturl, { signal: abortController.signal }), outStream), + ) + if (dlErr) { + if (dlErr.name !== 'AbortError') { + outputctl.error(dlErr.message) + throw dlErr + } + } + } + + outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + if (del && inPath) { + await fsp.unlink(inPath) + } + return assembly + } + + if (singleAssembly) { + // Single-assembly mode: collect file paths, then create one assembly with all inputs + // We close streams immediately to avoid exhausting file descriptors with many files + const collectedPaths: string[] = [] + + emitter.on('job', (job: Job) => { + if (job.in != null) { + const inPath = (job.in as fs.ReadStream).path as string + outputctl.debug(`COLLECTING JOB ${inPath}`) + collectedPaths.push(inPath) + // Close the stream immediately to avoid file descriptor exhaustion + ;(job.in as fs.ReadStream).destroy() + outputctl.debug(`STREAM CLOSED ${inPath}`) + } + }) + + emitter.on('error', (err: Error) => { + abortController.abort() + queue.clear() + outputctl.error(err) + reject(err) + }) + + emitter.on('end', async () => { + if (collectedPaths.length === 0) { + resolve({ results: [], hasFailures: false }) + return + } + + // Build uploads object, creating fresh streams for each file + const uploads: Record = {} + const inputPaths: string[] = [] + for (const inPath of collectedPaths) { + 
const basename = path.basename(inPath) + let key = basename + let counter = 1 + while (key in uploads) { + key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` + counter++ + } + uploads[key] = fs.createReadStream(inPath) + inputPaths.push(inPath) + } + + outputctl.debug(`Creating single assembly with ${Object.keys(uploads).length} files`) + + try { + const assembly = await queue.add(async () => { + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (Object.keys(uploads).length > 0) { + createOptions.uploads = uploads + } + + const result = await client.createAssembly(createOptions) + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const asm = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) + + if (asm.error || (asm.ok && asm.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${asm.error || asm.message} (Status: ${asm.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + // Download all results + if (asm.results && resolvedOutput != null) { + for (const [stepName, stepResults] of Object.entries(asm.results)) { + for (const stepResult of stepResults) { + const resultUrl = stepResult.url + if (!resultUrl) continue + + let outPath: string + if (outstat?.isDirectory()) { + outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) + } else { + outPath = resolvedOutput + } + + outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) + const [dlErr] = await tryCatch( + pipeline( + got.stream(resultUrl, { signal: abortController.signal }), + fs.createWriteStream(outPath), + ), + ) + if (dlErr) { + if (dlErr.name === 'AbortError') continue + outputctl.error(dlErr.message) + throw dlErr + } + } + } + } + + // Delete input files if requested + if (del) { + for (const inPath 
of inputPaths) { + await fsp.unlink(inPath) + } + } + return asm + }) + results.push(assembly) + } catch (err) { + hasFailures = true + outputctl.error(err as Error) + } + + resolve({ results, hasFailures }) + }) + } else { + // Default mode: one assembly per file with p-queue concurrency limiting + emitter.on('job', (job: Job) => { + const inPath = job.in + ? (((job.in as fs.ReadStream).path as string | undefined) ?? null) + : null + const outPath = job.out?.path ?? null + const outMtime = job.out?.mtime + outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + // Close the original streams immediately - we'll create fresh ones when processing + if (job.in != null) { + ;(job.in as fs.ReadStream).destroy() + } + if (job.out != null) { + job.out.destroy() + } + + // Add job to queue - p-queue handles concurrency automatically + queue + .add(async () => { + const result = await processAssemblyJob(inPath, outPath, outMtime) + if (result !== undefined) { + results.push(result) + } + }) + .catch((err: unknown) => { + hasFailures = true + outputctl.error(err as Error) + }) + }) + + emitter.on('error', (err: Error) => { + abortController.abort() + queue.clear() + outputctl.error(err) + reject(err) + }) + + emitter.on('end', async () => { + // Wait for all queued jobs to complete + await queue.onIdle() + resolve({ results, hasFailures }) + }) + } + }) +} + +// --- Command classes --- +export class AssembliesCreateCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'create'], + ['assembly', 'create'], + ['a', 'create'], + ['a', 'c'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Create assemblies to process media', + details: ` + Create assemblies to process media files using Transloadit. + You must specify either --steps or --template. 
+ `, + examples: [ + [ + 'Process a file with steps', + 'transloadit assemblies create --steps steps.json -i input.jpg -o output.jpg', + ], + [ + 'Process with a template', + 'transloadit assemblies create --template TEMPLATE_ID -i input.jpg -o output/', + ], + [ + 'Watch for changes', + 'transloadit assemblies create --steps steps.json -i input/ -o output/ --watch', + ], + ], + }) + + steps = Option.String('--steps,-s', { + description: 'Specify assembly instructions with a JSON file', + }) + + template = Option.String('--template,-t', { + description: 'Specify a template to use for these assemblies', + }) + + inputs = Option.Array('--input,-i', { + description: 'Provide an input file or a directory', + }) + + outputPath = Option.String('--output,-o', { + description: 'Specify an output file or directory', + }) + + fields = Option.Array('--field,-f', { + description: 'Set a template field (KEY=VAL)', + }) + + watch = Option.Boolean('--watch,-w', false, { + description: 'Watch inputs for changes', + }) + + recursive = Option.Boolean('--recursive,-r', false, { + description: 'Enumerate input directories recursively', + }) + + deleteAfterProcessing = Option.Boolean('--delete-after-processing,-d', false, { + description: 'Delete input files after they are processed', + }) + + reprocessStale = Option.Boolean('--reprocess-stale', false, { + description: 'Process inputs even if output is newer', + }) + + singleAssembly = Option.Boolean('--single-assembly', false, { + description: 'Pass all input files to a single assembly instead of one assembly per file', + }) + + concurrency = Option.String('--concurrency,-c', { + description: 'Maximum number of concurrent assemblies (default: 5)', + validator: t.isNumber(), + }) + + protected async run(): Promise { + if (!this.steps && !this.template) { + this.output.error('assemblies create requires exactly one of either --steps or --template') + return 1 + } + if (this.steps && this.template) { + this.output.error('assemblies create 
requires exactly one of either --steps or --template') + return 1 + } + + const inputList = this.inputs ?? [] + if (inputList.length === 0 && this.watch) { + this.output.error('assemblies create --watch requires at least one input') + return 1 + } + + // Default to stdin if no inputs and not a TTY + if (inputList.length === 0 && !process.stdin.isTTY) { + inputList.push('-') + } + + const fieldsMap: Record = {} + for (const field of this.fields ?? []) { + const eqIndex = field.indexOf('=') + if (eqIndex === -1) { + this.output.error(`invalid argument for --field: '${field}'`) + return 1 + } + const key = field.slice(0, eqIndex) + const value = field.slice(eqIndex + 1) + fieldsMap[key] = value + } + + if (this.singleAssembly && this.watch) { + this.output.error('--single-assembly cannot be used with --watch') + return 1 + } + + const { hasFailures } = await create(this.output, this.client, { + steps: this.steps, + template: this.template, + fields: fieldsMap, + watch: this.watch, + recursive: this.recursive, + inputs: inputList, + output: this.outputPath ?? null, + del: this.deleteAfterProcessing, + reprocessStale: this.reprocessStale, + singleAssembly: this.singleAssembly, + concurrency: this.concurrency, + }) + return hasFailures ? 
1 : undefined + } +} + +export class AssembliesListCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'list'], + ['assembly', 'list'], + ['a', 'list'], + ['a', 'l'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'List assemblies matching given criteria', + examples: [ + ['List recent assemblies', 'transloadit assemblies list'], + ['List assemblies after a date', 'transloadit assemblies list --after 2024-01-01'], + ], + }) + + before = Option.String('--before,-b', { + description: 'Return only assemblies created before specified date', + }) + + after = Option.String('--after,-a', { + description: 'Return only assemblies created after specified date', + }) + + keywords = Option.String('--keywords', { + description: 'Comma-separated list of keywords to match assemblies', + }) + + fields = Option.String('--fields', { + description: 'Comma-separated list of fields to return for each assembly', + }) + + protected async run(): Promise { + const keywordList = this.keywords ? this.keywords.split(',') : undefined + const fieldList = this.fields ? 
this.fields.split(',') : undefined + + await list(this.output, this.client, { + before: this.before, + after: this.after, + keywords: keywordList, + fields: fieldList, + }) + return undefined + } +} + +export class AssembliesGetCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'get'], + ['assembly', 'get'], + ['a', 'get'], + ['a', 'g'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Fetch assembly statuses', + examples: [['Get assembly status', 'transloadit assemblies get ASSEMBLY_ID']], + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await get(this.output, this.client, { + assemblies: this.assemblyIds, + }) + return undefined + } +} + +export class AssembliesDeleteCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'delete'], + ['assembly', 'delete'], + ['a', 'delete'], + ['a', 'd'], + ['assemblies', 'cancel'], + ['assembly', 'cancel'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Cancel assemblies', + examples: [['Cancel an assembly', 'transloadit assemblies delete ASSEMBLY_ID']], + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await deleteAssemblies(this.output, this.client, { + assemblies: this.assemblyIds, + }) + return undefined + } +} + +export class AssembliesReplayCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'replay'], + ['assembly', 'replay'], + ['a', 'replay'], + ['a', 'r'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Replay assemblies', + details: ` + Replay one or more assemblies. By default, replays use the original assembly instructions. + Use --steps to override the instructions, or --reparse-template to use the latest template version. 
+ `, + examples: [ + ['Replay an assembly with original steps', 'transloadit assemblies replay ASSEMBLY_ID'], + [ + 'Replay with different steps', + 'transloadit assemblies replay --steps new-steps.json ASSEMBLY_ID', + ], + [ + 'Replay with updated template', + 'transloadit assemblies replay --reparse-template ASSEMBLY_ID', + ], + ], + }) + + fields = Option.Array('--field,-f', { + description: 'Set a template field (KEY=VAL)', + }) + + steps = Option.String('--steps,-s', { + description: 'Optional JSON file to override assembly instructions', + }) + + notifyUrl = Option.String('--notify-url', { + description: 'Specify a new URL for assembly notifications', + }) + + reparseTemplate = Option.Boolean('--reparse-template', false, { + description: 'Use the most up-to-date version of the template', + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + const fieldsMap: Record = {} + for (const field of this.fields ?? []) { + const eqIndex = field.indexOf('=') + if (eqIndex === -1) { + this.output.error(`invalid argument for --field: '${field}'`) + return 1 + } + const key = field.slice(0, eqIndex) + const value = field.slice(eqIndex + 1) + fieldsMap[key] = value + } + + await replay(this.output, this.client, { + fields: fieldsMap, + reparse: this.reparseTemplate, + steps: this.steps, + notify_url: this.notifyUrl, + assemblies: this.assemblyIds, + }) + return undefined + } +} diff --git a/src/cli/commands/auth.ts b/src/cli/commands/auth.ts new file mode 100644 index 00000000..03b1818e --- /dev/null +++ b/src/cli/commands/auth.ts @@ -0,0 +1,362 @@ +import process from 'node:process' +import { Command, Option } from 'clipanion' +import type { ZodIssue } from 'zod' +import { z } from 'zod' +import { + assemblyAuthInstructionsSchema, + assemblyInstructionsSchema, +} from '../../alphalib/types/template.ts' +import type { OptionalAuthParams } from '../../apiTypes.ts' +import { Transloadit } from '../../Transloadit.ts' +import { 
UnauthenticatedCommand } from './BaseCommand.ts' + +type UrlParamPrimitive = string | number | boolean +type UrlParamArray = UrlParamPrimitive[] +type NormalizedUrlParams = Record + +const smartCdnParamsSchema = z + .object({ + workspace: z.string().min(1, 'workspace is required'), + template: z.string().min(1, 'template is required'), + input: z.union([z.string(), z.number(), z.boolean()]), + url_params: z.record(z.unknown()).optional(), + expire_at_ms: z.union([z.number(), z.string()]).optional(), + }) + .passthrough() + +const cliSignatureParamsSchema = assemblyInstructionsSchema + .extend({ auth: assemblyAuthInstructionsSchema.partial().optional() }) + .partial() + .passthrough() + +type CliSignatureParams = z.infer + +function formatIssues(issues: ZodIssue[]): string { + return issues + .map((issue) => { + const path = issue.path.join('.') || '(root)' + return `${path}: ${issue.message}` + }) + .join('; ') +} + +function normalizeUrlParam(value: unknown): UrlParamPrimitive | UrlParamArray | undefined { + if (value == null) return undefined + if (Array.isArray(value)) { + const normalized = value.filter( + (item): item is UrlParamPrimitive => + typeof item === 'string' || typeof item === 'number' || typeof item === 'boolean', + ) + return normalized.length > 0 ? 
normalized : undefined + } + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return value + } + return undefined +} + +function normalizeUrlParams(params?: Record): NormalizedUrlParams | undefined { + if (params == null) return undefined + let normalized: NormalizedUrlParams | undefined + for (const [key, value] of Object.entries(params)) { + const normalizedValue = normalizeUrlParam(value) + if (normalizedValue === undefined) continue + if (normalized == null) normalized = {} + normalized[key] = normalizedValue + } + return normalized +} + +async function readStdin(): Promise { + if (process.stdin.isTTY) return '' + + process.stdin.setEncoding('utf8') + let data = '' + + for await (const chunk of process.stdin) { + data += chunk + } + + return data +} + +function getCredentials(): { authKey: string; authSecret: string } | null { + const authKey = process.env.TRANSLOADIT_KEY || process.env.TRANSLOADIT_AUTH_KEY + const authSecret = process.env.TRANSLOADIT_SECRET || process.env.TRANSLOADIT_AUTH_SECRET + + if (!authKey || !authSecret) { + return null + } + + return { authKey, authSecret } +} + +// Result type for signature operations +type SigResult = { ok: true; output: string } | { ok: false; error: string } + +// Core logic for signature generation +function generateSignature( + input: string, + credentials: { authKey: string; authSecret: string }, + algorithm?: string, +): SigResult { + const { authKey, authSecret } = credentials + let params: CliSignatureParams + + if (input === '') { + params = { auth: { key: authKey } } + } else { + let parsed: unknown + try { + parsed = JSON.parse(input) + } catch (error) { + return { ok: false, error: `Failed to parse JSON from stdin: ${(error as Error).message}` } + } + + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + return { ok: false, error: 'Invalid params provided via stdin. Expected a JSON object.' 
} + } + + const parsedResult = cliSignatureParamsSchema.safeParse(parsed) + if (!parsedResult.success) { + return { ok: false, error: `Invalid params: ${formatIssues(parsedResult.error.issues)}` } + } + + const parsedParams = parsedResult.data + const existingAuth = parsedParams.auth ?? {} + + params = { + ...parsedParams, + auth: { + ...existingAuth, + key: authKey, + }, + } + } + + const client = new Transloadit({ authKey, authSecret }) + try { + const signature = client.calcSignature(params as OptionalAuthParams, algorithm) + return { ok: true, output: JSON.stringify(signature) } + } catch (error) { + return { ok: false, error: `Failed to generate signature: ${(error as Error).message}` } + } +} + +// Core logic for Smart CDN URL generation +function generateSmartCdnUrl( + input: string, + credentials: { authKey: string; authSecret: string }, +): SigResult { + const { authKey, authSecret } = credentials + + if (input === '') { + return { + ok: false, + error: + 'Missing params provided via stdin. Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', + } + } + + let parsed: unknown + try { + parsed = JSON.parse(input) + } catch (error) { + return { ok: false, error: `Failed to parse JSON from stdin: ${(error as Error).message}` } + } + + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + return { ok: false, error: 'Invalid params provided via stdin. Expected a JSON object.' 
} + } + + const parsedResult = smartCdnParamsSchema.safeParse(parsed) + if (!parsedResult.success) { + return { ok: false, error: `Invalid params: ${formatIssues(parsedResult.error.issues)}` } + } + + const { workspace, template, input: inputFieldRaw, url_params, expire_at_ms } = parsedResult.data + const urlParams = normalizeUrlParams(url_params) + + let expiresAt: number | undefined + if (typeof expire_at_ms === 'string') { + const parsedNumber = Number.parseInt(expire_at_ms, 10) + if (Number.isNaN(parsedNumber)) { + return { ok: false, error: 'Invalid params: expire_at_ms must be a number.' } + } + expiresAt = parsedNumber + } else { + expiresAt = expire_at_ms + } + + const inputField = typeof inputFieldRaw === 'string' ? inputFieldRaw : String(inputFieldRaw) + + const client = new Transloadit({ authKey, authSecret }) + try { + const signedUrl = client.getSignedSmartCDNUrl({ + workspace, + template, + input: inputField, + urlParams, + expiresAt, + }) + return { ok: true, output: signedUrl } + } catch (error) { + return { ok: false, error: `Failed to generate Smart CDN URL: ${(error as Error).message}` } + } +} + +// Testable helper functions exported for unit tests +export interface RunSigOptions { + providedInput?: string + algorithm?: string +} + +export interface RunSmartSigOptions { + providedInput?: string +} + +export async function runSig(options: RunSigOptions = {}): Promise { + const credentials = getCredentials() + if (credentials == null) { + console.error( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + process.exitCode = 1 + return + } + + const rawInput = options.providedInput ?? 
(await readStdin()) + const result = generateSignature(rawInput.trim(), credentials, options.algorithm) + + if (result.ok) { + process.stdout.write(`${result.output}\n`) + } else { + console.error(result.error) + process.exitCode = 1 + } +} + +export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { + const credentials = getCredentials() + if (credentials == null) { + console.error( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + process.exitCode = 1 + return + } + + const rawInput = options.providedInput ?? (await readStdin()) + const result = generateSmartCdnUrl(rawInput.trim(), credentials) + + if (result.ok) { + process.stdout.write(`${result.output}\n`) + } else { + console.error(result.error) + process.exitCode = 1 + } +} + +/** + * Generate a signature for assembly params + */ +export class SignatureCommand extends UnauthenticatedCommand { + static override paths = [ + ['auth', 'signature'], + ['auth', 'sig'], + ['signature'], + ['sig'], // BC alias + ] + + static override usage = Command.Usage({ + category: 'Auth', + description: 'Generate a signature for assembly params', + details: ` + Read params JSON from stdin and output signed payload JSON. + If no input is provided, generates a signature with default params. + `, + examples: [ + ['Generate signature', 'echo \'{"steps":{}}\' | transloadit signature'], + ['With algorithm', 'echo \'{"steps":{}}\' | transloadit signature --algorithm sha384'], + ['Using alias', 'echo \'{"steps":{}}\' | transloadit sig'], + ], + }) + + algorithm = Option.String('--algorithm,-a', { + description: 'Signature algorithm to use (sha1, sha256, sha384, sha512)', + }) + + protected async run(): Promise { + const credentials = getCredentials() + if (credentials == null) { + this.output.error( + 'Missing credentials. 
Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + return 1 + } + + const rawInput = await readStdin() + const result = generateSignature(rawInput.trim(), credentials, this.algorithm) + + if (result.ok) { + process.stdout.write(`${result.output}\n`) + return undefined + } + + this.output.error(result.error) + return 1 + } +} + +/** + * Generate a signed Smart CDN URL + */ +export class SmartCdnSignatureCommand extends UnauthenticatedCommand { + static override paths = [ + ['auth', 'smart-cdn'], + ['auth', 'smart_cdn'], + ['smart-cdn'], + ['smart_sig'], // BC alias + ] + + static override usage = Command.Usage({ + category: 'Auth', + description: 'Generate a signed Smart CDN URL', + details: ` + Read Smart CDN params JSON from stdin and output a signed URL. + Required fields: workspace, template, input + Optional fields: expire_at_ms, url_params + `, + examples: [ + [ + 'Generate Smart CDN URL', + 'echo \'{"workspace":"w","template":"t","input":"i"}\' | transloadit smart-cdn', + ], + [ + 'Using alias', + 'echo \'{"workspace":"w","template":"t","input":"i"}\' | transloadit smart_sig', + ], + ], + }) + + protected async run(): Promise { + const credentials = getCredentials() + if (credentials == null) { + this.output.error( + 'Missing credentials. 
Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + return 1 + } + + const rawInput = await readStdin() + const result = generateSmartCdnUrl(rawInput.trim(), credentials) + + if (result.ok) { + process.stdout.write(`${result.output}\n`) + return undefined + } + + this.output.error(result.error) + return 1 + } +} diff --git a/src/cli/commands/bills.ts b/src/cli/commands/bills.ts new file mode 100644 index 00000000..03d0a998 --- /dev/null +++ b/src/cli/commands/bills.ts @@ -0,0 +1,91 @@ +import { Command, Option } from 'clipanion' +import { z } from 'zod' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Transloadit } from '../../Transloadit.ts' +import { formatAPIError } from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +// --- Types and business logic --- + +export interface BillsGetOptions { + months: string[] +} + +const BillResponseSchema = z.object({ + total: z.number(), +}) + +export async function get( + output: IOutputCtl, + client: Transloadit, + { months }: BillsGetOptions, +): Promise { + const requests = months.map((month) => client.getBill(month)) + + const [err, results] = await tryCatch(Promise.all(requests)) + if (err) { + output.error(formatAPIError(err)) + return + } + + for (const result of results) { + const parsed = BillResponseSchema.safeParse(result) + if (parsed.success) { + output.print(`$${parsed.data.total}`, result) + } else { + output.print('Unable to parse bill response', result) + } + } +} + +// --- Command class --- + +export class BillsGetCommand extends AuthenticatedCommand { + static override paths = [ + ['bills', 'get'], + ['bill', 'get'], + ['b', 'get'], + ['b', 'g'], + ] + + static override usage = Command.Usage({ + category: 'Bills', + description: 'Fetch billing information', + details: ` + Fetch billing information for the specified months. + Months should be specified in YYYY-MM format. 
+ If no month is specified, returns the current month. + `, + examples: [ + ['Get current month billing', 'transloadit bills get'], + ['Get specific month', 'transloadit bills get 2024-01'], + ['Get multiple months', 'transloadit bills get 2024-01 2024-02'], + ], + }) + + months = Option.Rest() + + protected async run(): Promise { + const monthList: string[] = [] + + for (const month of this.months) { + if (!/^\d{4}-\d{1,2}$/.test(month)) { + this.output.error(`invalid date format '${month}' (YYYY-MM)`) + return 1 + } + monthList.push(month) + } + + // Default to current month if none specified + if (monthList.length === 0) { + const d = new Date() + monthList.push(`${d.getUTCFullYear()}-${d.getUTCMonth() + 1}`) + } + + await get(this.output, this.client, { + months: monthList, + }) + return undefined + } +} diff --git a/src/cli/commands/index.ts b/src/cli/commands/index.ts new file mode 100644 index 00000000..5837d5a9 --- /dev/null +++ b/src/cli/commands/index.ts @@ -0,0 +1,65 @@ +import { Builtins, Cli } from 'clipanion' + +import packageJson from '../../../package.json' with { type: 'json' } + +import { + AssembliesCreateCommand, + AssembliesDeleteCommand, + AssembliesGetCommand, + AssembliesListCommand, + AssembliesReplayCommand, +} from './assemblies.ts' + +import { SignatureCommand, SmartCdnSignatureCommand } from './auth.ts' + +import { BillsGetCommand } from './bills.ts' + +import { NotificationsReplayCommand } from './notifications.ts' + +import { + TemplatesCreateCommand, + TemplatesDeleteCommand, + TemplatesGetCommand, + TemplatesListCommand, + TemplatesModifyCommand, + TemplatesSyncCommand, +} from './templates.ts' + +export function createCli(): Cli { + const cli = new Cli({ + binaryLabel: 'Transloadit CLI', + binaryName: 'transloadit', + binaryVersion: packageJson.version, + }) + + // Built-in commands + cli.register(Builtins.HelpCommand) + cli.register(Builtins.VersionCommand) + + // Auth commands (signature generation) + 
cli.register(SignatureCommand) + cli.register(SmartCdnSignatureCommand) + + // Assemblies commands + cli.register(AssembliesCreateCommand) + cli.register(AssembliesListCommand) + cli.register(AssembliesGetCommand) + cli.register(AssembliesDeleteCommand) + cli.register(AssembliesReplayCommand) + + // Templates commands + cli.register(TemplatesCreateCommand) + cli.register(TemplatesGetCommand) + cli.register(TemplatesModifyCommand) + cli.register(TemplatesDeleteCommand) + cli.register(TemplatesListCommand) + cli.register(TemplatesSyncCommand) + + // Bills commands + cli.register(BillsGetCommand) + + // Notifications commands + cli.register(NotificationsReplayCommand) + + return cli +} diff --git a/src/cli/commands/notifications.ts b/src/cli/commands/notifications.ts new file mode 100644 index 00000000..e31b4edc --- /dev/null +++ b/src/cli/commands/notifications.ts @@ -0,0 +1,63 @@ +import { Command, Option } from 'clipanion' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Transloadit } from '../../Transloadit.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import { ensureError } from '../types.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +// --- Types and business logic --- + +export interface NotificationsReplayOptions { + notify_url?: string + assemblies: string[] +} + +export async function replay( + output: IOutputCtl, + client: Transloadit, + { notify_url, assemblies }: NotificationsReplayOptions, +): Promise { + const promises = assemblies.map((id) => client.replayAssemblyNotification(id, { notify_url })) + const [err] = await tryCatch(Promise.all(promises)) + if (err) { + output.error(ensureError(err).message) + } +} + +// --- Command class --- + +export class NotificationsReplayCommand extends AuthenticatedCommand { + static override paths = [ + ['assembly-notifications', 'replay'], + ['notifications', 'replay'], + ['notification', 'replay'], + ['n', 'replay'], + ['n', 'r'], + ] + + static override usage = 
Command.Usage({ + category: 'Notifications', + description: 'Replay notifications for assemblies', + examples: [ + ['Replay notifications', 'transloadit assembly-notifications replay ASSEMBLY_ID'], + [ + 'Replay to a new URL', + 'transloadit assembly-notifications replay --notify-url https://example.com/notify ASSEMBLY_ID', + ], + ], + }) + + notifyUrl = Option.String('--notify-url', { + description: 'Specify a new URL to send the notifications to', + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await replay(this.output, this.client, { + notify_url: this.notifyUrl, + assemblies: this.assemblyIds, + }) + return undefined + } +} diff --git a/src/cli/commands/templates.ts b/src/cli/commands/templates.ts new file mode 100644 index 00000000..f0649687 --- /dev/null +++ b/src/cli/commands/templates.ts @@ -0,0 +1,556 @@ +import fsp from 'node:fs/promises' +import path from 'node:path' +import { promisify } from 'node:util' +import { Command, Option } from 'clipanion' +import rreaddir from 'recursive-readdir' +import { z } from 'zod' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Steps } from '../../alphalib/types/template.ts' +import { stepsSchema } from '../../alphalib/types/template.ts' +import type { TemplateContent } from '../../apiTypes.ts' +import type { Transloadit } from '../../Transloadit.ts' +import { createReadStream, formatAPIError, streamToBuffer } from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import ModifiedLookup from '../template-last-modified.ts' +import type { TemplateFile } from '../types.ts' +import { ensureError, isTransloaditAPIError, TemplateFileDataSchema } from '../types.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +const rreaddirAsync = promisify(rreaddir) + +export interface TemplateCreateOptions { + name: string + file: string +} + +export interface TemplateGetOptions { + templates: string[] +} + +export interface TemplateModifyOptions 
{ + template: string + name?: string + file: string +} + +export interface TemplateDeleteOptions { + templates: string[] +} + +export interface TemplateListOptions { + before?: string + after?: string + order?: 'asc' | 'desc' + sort?: string + fields?: string[] +} + +export interface TemplateSyncOptions { + files: string[] + recursive?: boolean +} + +export async function create( + output: IOutputCtl, + client: Transloadit, + { name, file }: TemplateCreateOptions, +): Promise { + try { + const buf = await streamToBuffer(createReadStream(file)) + + const parsed: unknown = JSON.parse(buf.toString()) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid template steps format: ${validated.error.message}`) + } + + const result = await client.createTemplate({ + name, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + template: { steps: validated.data } as TemplateContent, + }) + output.print(result.id, result) + return result + } catch (err) { + const error = ensureError(err) + output.error(error.message) + throw err + } +} + +export async function get( + output: IOutputCtl, + client: Transloadit, + { templates }: TemplateGetOptions, +): Promise { + const requests = templates.map((template) => client.getTemplate(template)) + + const [err, results] = await tryCatch(Promise.all(requests)) + if (err) { + output.error(formatAPIError(err)) + throw err + } + + for (const result of results) { + output.print(result, result) + } +} + +export async function modify( + output: IOutputCtl, + client: Transloadit, + { template, name, file }: TemplateModifyOptions, +): Promise { + try { + const buf = await streamToBuffer(createReadStream(file)) + + let steps: Steps | null = null + let newName = name + + if (buf.length > 0) { + const parsed: unknown = JSON.parse(buf.toString()) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid template steps format: 
${validated.error.message}`) + } + steps = validated.data + } + + if (!name || buf.length === 0) { + const tpl = await client.getTemplate(template) + if (!name) newName = tpl.name + if (buf.length === 0 && tpl.content.steps) { + steps = tpl.content.steps + } + } + + if (steps === null) { + throw new Error('No steps to update template with') + } + + await client.editTemplate(template, { + name: newName, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + template: { steps } as TemplateContent, + }) + } catch (err) { + output.error(formatAPIError(err)) + throw err + } +} + +async function deleteTemplates( + output: IOutputCtl, + client: Transloadit, + { templates }: TemplateDeleteOptions, +): Promise { + await Promise.all( + templates.map(async (template) => { + const [err] = await tryCatch(client.deleteTemplate(template)) + if (err) { + output.error(formatAPIError(err)) + throw err + } + }), + ) +} + +// Export with `delete` alias for external consumers +export { deleteTemplates as delete } + +const TemplateIdSchema = z.object({ + id: z.string(), +}) + +export function list( + output: IOutputCtl, + client: Transloadit, + { before, after, order, sort, fields }: TemplateListOptions, +): void { + const stream = client.streamTemplates({ + todate: before, + fromdate: after, + order, + sort: sort as 'id' | 'name' | 'created' | 'modified' | undefined, + }) + + stream.on('readable', () => { + const template: unknown = stream.read() + if (template == null) return + + const parsed = TemplateIdSchema.safeParse(template) + if (!parsed.success) return + + if (fields == null) { + output.print(parsed.data.id, template) + } else { + const templateRecord = template as Record + output.print(fields.map((field) => templateRecord[field]).join(' '), template) + } + }) + + stream.on('error', (err: unknown) => { + output.error(formatAPIError(err)) + }) +} + +export async function sync( + output: IOutputCtl, + client: Transloadit, + { files, recursive }: 
TemplateSyncOptions, +): Promise { + // Promise [String] -- all files in the directory tree + const relevantFilesNested = await Promise.all( + files.map(async (file) => { + const stats = await fsp.stat(file) + if (!stats.isDirectory()) return [file] + + let children: string[] + if (recursive) { + children = (await rreaddirAsync(file)) as string[] + } else { + const list = await fsp.readdir(file) + children = list.map((child) => path.join(file, child)) + } + + if (recursive) return children + + // Filter directories if not recursive + const filtered = await Promise.all( + children.map(async (child) => { + const childStats = await fsp.stat(child) + return childStats.isDirectory() ? null : child + }), + ) + return filtered.filter((f): f is string => f !== null) + }), + ) + const relevantFiles = relevantFilesNested.flat() + + // Promise [{ file: String, data: JSON }] -- all templates + const maybeFiles = await Promise.all(relevantFiles.map(templateFileOrNull)) + const templates = maybeFiles.filter((maybeFile): maybeFile is TemplateFile => maybeFile !== null) + + async function templateFileOrNull(file: string): Promise { + if (path.extname(file) !== '.json') return null + + try { + const data = await fsp.readFile(file, 'utf8') + const parsed: unknown = JSON.parse(data) + const validated = TemplateFileDataSchema.safeParse(parsed) + if (!validated.success) return null + return 'transloadit_template_id' in validated.data ? 
{ file, data: validated.data } : null + } catch (e) { + if (e instanceof SyntaxError) return null + throw e + } + } + + const modified = new ModifiedLookup(client) + + const [err] = await tryCatch( + Promise.all( + templates.map(async (template) => { + if (!('steps' in template.data)) { + if (!template.data.transloadit_template_id) { + throw new Error(`Template file has no id and no steps: ${template.file}`) + } + return download(template) + } + + if (!template.data.transloadit_template_id) return upload(template) + + const stats = await fsp.stat(template.file) + const fileModified = stats.mtime + + let templateModified: Date + const templateId = template.data.transloadit_template_id + try { + await client.getTemplate(templateId) + templateModified = await new Promise((resolve, reject) => + modified.byId(templateId, (err, res) => { + if (err) { + reject(err) + } else if (res) { + resolve(res) + } else { + reject(new Error('No date returned')) + } + }), + ) + } catch (err) { + if (isTransloaditAPIError(err)) { + if (err.code === 'SERVER_404' || (err.response && err.response.statusCode === 404)) { + throw new Error(`Template file references nonexistent template: ${template.file}`) + } + } + throw err + } + + if (fileModified > templateModified) return upload(template) + return download(template) + }), + ), + ) + if (err) { + output.error(err) + throw err + } + + async function upload(template: TemplateFile): Promise { + const params = { + name: path.basename(template.file, '.json'), + template: { steps: template.data.steps } as TemplateContent, + } + + if (!template.data.transloadit_template_id) { + const result = await client.createTemplate(params) + template.data.transloadit_template_id = result.id + await fsp.writeFile(template.file, JSON.stringify(template.data)) + return + } + + await client.editTemplate(template.data.transloadit_template_id, params) + } + + async function download(template: TemplateFile): Promise { + const templateId = 
template.data.transloadit_template_id + if (!templateId) { + throw new Error('Cannot download template without id') + } + + const result = await client.getTemplate(templateId) + + // Use empty object if template has no steps (undefined would be stripped by JSON.stringify) + template.data.steps = result.content.steps ?? {} + const file = path.join(path.dirname(template.file), `${result.name}.json`) + + await fsp.writeFile(template.file, JSON.stringify(template.data)) + + if (file !== template.file) { + await fsp.rename(template.file, file) + } + } +} +export class TemplatesCreateCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'create'], + ['template', 'create'], + ['t', 'create'], + ['t', 'c'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Create a new template', + details: ` + Create a new template with the given name. + If FILE is not specified, reads from STDIN. + `, + examples: [ + ['Create template from file', 'transloadit templates create my-template steps.json'], + ['Create template from stdin', 'cat steps.json | transloadit templates create my-template'], + ], + }) + + name = Option.String({ required: true }) + file = Option.String({ required: false }) + + protected async run(): Promise { + await create(this.output, this.client, { + name: this.name, + file: this.file ?? 
'-', + }) + return undefined + } +} + +export class TemplatesGetCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'get'], + ['template', 'get'], + ['t', 'get'], + ['t', 'g'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Retrieve the template content as JSON', + examples: [['Get a template', 'transloadit templates get TEMPLATE_ID']], + }) + + templateIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await get(this.output, this.client, { + templates: this.templateIds, + }) + return undefined + } +} + +export class TemplatesModifyCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'modify'], + ['template', 'modify'], + ['t', 'modify'], + ['t', 'm'], + ['templates', 'edit'], + ['template', 'edit'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Change the JSON content of a template', + details: ` + Modify an existing template. + If FILE is not specified, reads from STDIN. + `, + examples: [ + ['Modify template from file', 'transloadit templates modify TEMPLATE_ID steps.json'], + ['Rename a template', 'transloadit templates modify --name new-name TEMPLATE_ID'], + ], + }) + + newName = Option.String('--name,-n', { + description: 'A new name for the template', + }) + + templateId = Option.String({ required: true }) + file = Option.String({ required: false }) + + protected async run(): Promise { + await modify(this.output, this.client, { + template: this.templateId, + name: this.newName, + file: this.file ?? 
'-', + }) + return undefined + } +} + +export class TemplatesDeleteCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'delete'], + ['template', 'delete'], + ['t', 'delete'], + ['t', 'd'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Delete templates', + examples: [['Delete a template', 'transloadit templates delete TEMPLATE_ID']], + }) + + templateIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await deleteTemplates(this.output, this.client, { + templates: this.templateIds, + }) + return undefined + } +} + +export class TemplatesListCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'list'], + ['template', 'list'], + ['t', 'list'], + ['t', 'l'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'List templates matching given criteria', + examples: [ + ['List all templates', 'transloadit templates list'], + ['List templates sorted by name', 'transloadit templates list --sort name'], + ], + }) + + after = Option.String('--after,-a', { + description: 'Return only templates created after specified date', + }) + + before = Option.String('--before,-b', { + description: 'Return only templates created before specified date', + }) + + sort = Option.String('--sort', { + description: 'Field to sort by (id, name, created, or modified)', + }) + + order = Option.String('--order', { + description: 'Sort ascending or descending (asc or desc)', + }) + + fields = Option.String('--fields', { + description: 'Comma-separated list of fields to return for each template', + }) + + protected async run(): Promise { + if (this.sort && !['id', 'name', 'created', 'modified'].includes(this.sort)) { + this.output.error('invalid argument for --sort') + return 1 + } + + if (this.order && !['asc', 'desc'].includes(this.order)) { + this.output.error('invalid argument for --order') + return 1 + } + + const fieldList = this.fields ? 
this.fields.split(',') : undefined + + await list(this.output, this.client, { + after: this.after, + before: this.before, + sort: this.sort, + order: this.order as 'asc' | 'desc' | undefined, + fields: fieldList, + }) + return undefined + } +} + +export class TemplatesSyncCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'sync'], + ['template', 'sync'], + ['t', 'sync'], + ['t', 's'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Synchronize local template files with the Transloadit API', + details: ` + Template files must be named *.json and have the key "transloadit_template_id" + and optionally "steps". If "transloadit_template_id" is an empty string, then + a new template will be created using the instructions in "steps". If "steps" is + missing then it will be filled in by the instructions of the template specified + by "transloadit_template_id". If both keys are present then the local template + file and the remote template will be synchronized to whichever was more recently + modified. 
+ `, + examples: [ + ['Sync templates in a directory', 'transloadit templates sync templates/'], + ['Sync recursively', 'transloadit templates sync --recursive templates/'], + ], + }) + + recursive = Option.Boolean('--recursive,-r', false, { + description: 'Look for template files in directories recursively', + }) + + files = Option.Rest() + + protected async run(): Promise { + await sync(this.output, this.client, { + recursive: this.recursive, + files: this.files, + }) + return undefined + } +} diff --git a/src/cli/helpers.ts b/src/cli/helpers.ts new file mode 100644 index 00000000..ef65b9f9 --- /dev/null +++ b/src/cli/helpers.ts @@ -0,0 +1,41 @@ +import fs from 'node:fs' +import type { Readable } from 'node:stream' +import type { APIError } from './types.ts' +import { isAPIError } from './types.ts' + +export function createReadStream(file: string): Readable { + if (file === '-') return process.stdin + return fs.createReadStream(file) +} + +export async function streamToBuffer(stream: Readable): Promise { + const chunks: Buffer[] = [] + for await (const chunk of stream) { + chunks.push(Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk)) + } + return Buffer.concat(chunks) +} + +export function formatAPIError(err: unknown): string { + if (isAPIError(err)) { + return `${err.error}: ${err.message}` + } + if (err instanceof Error) { + return err.message + } + return String(err) +} + +// Re-export APIError type for convenience +export type { APIError } + +export function zip(listA: A[], listB: B[]): [A, B][] +export function zip(...lists: T[][]): T[][] +export function zip(...lists: T[][]): T[][] { + const length = Math.max(...lists.map((list) => list.length)) + const result: T[][] = new Array(length) + for (let i = 0; i < result.length; i++) { + result[i] = lists.map((list) => list[i] as T) + } + return result +} diff --git a/src/cli/template-last-modified.ts b/src/cli/template-last-modified.ts new file mode 100644 index 00000000..eae0718a --- /dev/null +++ b/src/cli/template-last-modified.ts @@ -0,0 +1,156 @@ +import type { Transloadit } from '../Transloadit.ts' +import { ensureError } from './types.ts' + +interface TemplateItem { + id: string + modified: string +} + +type FetchCallback = (err: Error | null, result?: T) => void +type PageFetcher = (page: number, pagesize: number, cb: FetchCallback) => void + +class MemoizedPagination { + private pagesize: number + private fetch: PageFetcher + private cache: (T | undefined)[] + + constructor(pagesize: number, fetch: PageFetcher) { + this.pagesize = pagesize + this.fetch = fetch + this.cache = [] + } + + get(i: number, cb: FetchCallback): void { + const cached = this.cache[i] + if (cached !== undefined) { + process.nextTick(() => cb(null, cached)) + return + } + + const page = Math.floor(i / this.pagesize) + 1 + const start = (page - 1) * this.pagesize + + this.fetch(page, this.pagesize, (err, result) => { + if (err) { + cb(err) + return + } + if (!result) { + cb(new Error('No result returned from fetch')) + return + } + for (let j = 0; j < this.pagesize; j++) { + this.cache[start + j] = result[j] + } + cb(null, 
this.cache[i]) + }) + } +} + +export default class ModifiedLookup { + private byOrdinal: MemoizedPagination + + constructor(client: Transloadit, pagesize = 50) { + this.byOrdinal = new MemoizedPagination(pagesize, (page, pagesize, cb) => { + const params = { + sort: 'id' as const, + order: 'asc' as const, + fields: ['id', 'modified'] as ('id' | 'modified')[], + page, + pagesize, + } + + client + .listTemplates(params) + .then((result) => { + const items: TemplateItem[] = new Array(pagesize) + // Fill with sentinel value larger than any hex ID + items.fill({ id: 'gggggggggggggggggggggggggggggggg', modified: '' }) + for (let i = 0; i < result.items.length; i++) { + const item = result.items[i] + if (item) { + items[i] = { id: item.id, modified: item.modified } + } + } + cb(null, items) + }) + .catch((err: unknown) => { + cb(ensureError(err)) + }) + }) + } + + private idByOrd(ord: number, cb: FetchCallback): void { + this.byOrdinal.get(ord, (err, result) => { + if (err) { + cb(err) + return + } + if (!result) { + cb(new Error('No result found')) + return + } + cb(null, result.id) + }) + } + + byId(id: string, cb: FetchCallback): void { + const findUpperBound = (bound: number): void => { + this.idByOrd(bound, (err, idAtBound) => { + if (err) { + cb(err) + return + } + if (idAtBound === id) { + complete(bound) + return + } + if (idAtBound && idAtBound > id) { + refine(Math.floor(bound / 2), bound) + return + } + findUpperBound(bound * 2) + }) + } + + const refine = (lower: number, upper: number): void => { + if (lower >= upper - 1) { + cb(new Error(`Template ID ${id} not found in ModifiedLookup`)) + return + } + + const middle = Math.floor((lower + upper) / 2) + this.idByOrd(middle, (err, idAtMiddle) => { + if (err) { + cb(err) + return + } + if (idAtMiddle === id) { + complete(middle) + return + } + if (idAtMiddle && idAtMiddle < id) { + refine(middle, upper) + return + } + refine(lower, middle) + }) + } + + const complete = (ord: number): void => { + 
this.byOrdinal.get(ord, (err, result) => { + if (err) { + cb(err) + return + } + if (!result) { + cb(new Error('No result found')) + return + } + cb(null, new Date(result.modified)) + }) + } + + findUpperBound(1) + } +} diff --git a/src/cli/types.ts b/src/cli/types.ts new file mode 100644 index 00000000..99c3821a --- /dev/null +++ b/src/cli/types.ts @@ -0,0 +1,183 @@ +import { z } from 'zod' +import type { Steps } from '../alphalib/types/template.ts' +import { optionalStepsSchema } from '../alphalib/types/template.ts' +import type { BillResponse, ListedTemplate, TemplateResponse } from '../apiTypes.ts' +import type { AssemblyStatus, Transloadit } from '../Transloadit.ts' +import type { IOutputCtl } from './OutputCtl.ts' + +// Re-export transloadit types for CLI use +export type { AssemblyStatus, BillResponse, ListedTemplate, TemplateResponse } +export type { Transloadit } +export type { CreateAssemblyOptions } from '../Transloadit.ts' + +// Zod schemas for runtime validation +export const APIErrorSchema = z.object({ + error: z.string(), + message: z.string(), +}) +export type APIError = z.infer + +export const TransloaditAPIErrorSchema = z.object({ + error: z.string().optional(), + message: z.string(), + code: z.string().optional(), + transloaditErrorCode: z.string().optional(), + response: z + .object({ + body: z + .object({ + error: z.string().optional(), + }) + .optional(), + statusCode: z.number().optional(), + }) + .optional(), +}) +export type TransloaditAPIError = z.infer + +// Template file data - explicit type to avoid TS inference limits +export interface TemplateFileData { + transloadit_template_id?: string + steps?: Steps + [key: string]: unknown // passthrough +} + +export const TemplateFileDataSchema: z.ZodType = z + .object({ + transloadit_template_id: z.string().optional(), + steps: optionalStepsSchema, + }) + .passthrough() as z.ZodType + +export interface TemplateFile { + file: string + data: TemplateFileData +} + +// Template list item (from API) 
+export interface TemplateListItem { + id: string + modified: string + name?: string +} + +// CLI Invocation types +export interface BaseInvocation { + error?: boolean + message?: string + mode: string + action?: string + logLevel?: number + jsonMode?: boolean +} + +export interface AssemblyInvocation extends BaseInvocation { + mode: 'assemblies' + action?: 'create' | 'get' | 'list' | 'delete' | 'replay' + inputs: string[] + output?: string + recursive?: boolean + watch?: boolean + del?: boolean + reprocessStale?: boolean + steps?: string + template?: string + fields?: Record + assemblies?: string[] + before?: string + after?: string + keywords?: string[] + notify_url?: string + reparse?: boolean +} + +export interface TemplateInvocation extends BaseInvocation { + mode: 'templates' + action?: 'create' | 'get' | 'list' | 'delete' | 'modify' | 'sync' + templates?: string[] + template?: string + name?: string + file?: string + files?: string[] + before?: string + after?: string + order?: 'asc' | 'desc' + sort?: string + fields?: string[] + recursive?: boolean +} + +export interface BillInvocation extends BaseInvocation { + mode: 'bills' + action?: 'get' + months: string[] +} + +export interface NotificationInvocation extends BaseInvocation { + mode: 'assembly-notifications' + action?: 'list' | 'replay' + assemblies?: string[] + notify_url?: string + type?: string + assembly_id?: string + pagesize?: number +} + +export interface HelpInvocation extends BaseInvocation { + mode: 'help' | 'version' | 'register' +} + +export type Invocation = + | AssemblyInvocation + | TemplateInvocation + | BillInvocation + | NotificationInvocation + | HelpInvocation + +// Command handler type +export type CommandHandler = ( + output: IOutputCtl, + client: Transloadit | undefined, + invocation: T, +) => void | Promise + +// Type guard for Error +export function isError(value: unknown): value is Error { + return value instanceof Error +} + +// Helper to ensure error is Error type +export 
function ensureError(value: unknown): Error { + if (value instanceof Error) { + return value + } + return new Error(`Non-error was thrown: ${String(value)}`) +} + +// Type guard for APIError +export function isAPIError(value: unknown): value is APIError { + return APIErrorSchema.safeParse(value).success +} + +// Type guard for TransloaditAPIError +export function isTransloaditAPIError(value: unknown): value is TransloaditAPIError { + return TransloaditAPIErrorSchema.safeParse(value).success +} + +// Type guard for NodeJS.ErrnoException +export function isErrnoException(value: unknown): value is NodeJS.ErrnoException { + return value instanceof Error && 'code' in value +} + +// Safe array access helper +export function safeGet<T>(arr: T[], index: number): T | undefined { + return arr[index] +} + +// Assert defined helper +export function assertDefined<T>(value: T | undefined | null, message: string): T { + if (value === undefined || value === null) { + throw new Error(message) + } + return value +} diff --git a/src/tus.ts b/src/tus.ts index 80be39d1..5f4a1b3c 100644 --- a/src/tus.ts +++ b/src/tus.ts @@ -3,7 +3,8 @@ import { basename } from 'node:path' import type { Readable } from 'node:stream' import debug from 'debug' import pMap from 'p-map' -import { type OnSuccessPayload, Upload, type UploadOptions } from 'tus-js-client' +import type { OnSuccessPayload, UploadOptions } from 'tus-js-client' +import { Upload } from 'tus-js-client' import type { AssemblyStatus } from './alphalib/types/assemblyStatus.ts' import type { UploadProgress } from './Transloadit.ts' @@ -20,6 +21,7 @@ interface SendTusRequestOptions { requestedChunkSize: number uploadConcurrency: number onProgress: (options: UploadProgress) => void + signal?: AbortSignal } export async function sendTusRequest({ @@ -28,6 +30,7 @@ export async function sendTusRequest({ + signal, }: SendTusRequestOptions) { const streamLabels = Object.keys(streamsMap) @@ -42,6 +45,9 
@@ export async function sendTusRequest({ await pMap( streamLabels, async (label) => { + // Check if aborted before each operation + if (signal?.aborted) throw new Error('Upload aborted') + const streamInfo = streamsMap[label] if (!streamInfo) { throw new Error(`Stream info not found for label: ${label}`) @@ -54,7 +60,7 @@ export async function sendTusRequest({ totalBytes += size } }, - { concurrency: 5 }, + { concurrency: 5, signal }, ) const uploadProgresses: Record = {} @@ -102,12 +108,29 @@ export async function sendTusRequest({ const filename = path ? basename(path) : label - await new Promise((resolve, reject) => { + await new Promise((resolvePromise, rejectPromise) => { if (!assembly.assembly_ssl_url) { - reject(new Error('assembly_ssl_url is not present in the assembly status')) + rejectPromise(new Error('assembly_ssl_url is not present in the assembly status')) return } + // Check if already aborted before starting + if (signal?.aborted) { + rejectPromise(new Error('Upload aborted')) + return + } + + // Wrap resolve/reject to clean up abort listener + let abortHandler: (() => void) | undefined + const resolve = (payload: OnSuccessPayload) => { + if (abortHandler) signal?.removeEventListener('abort', abortHandler) + resolvePromise(payload) + } + const reject = (err: unknown) => { + if (abortHandler) signal?.removeEventListener('abort', abortHandler) + rejectPromise(err) + } + const tusOptions: UploadOptions = { endpoint: assembly.tus_url, metadata: { @@ -126,11 +149,20 @@ export async function sendTusRequest({ const tusUpload = new Upload(stream, tusOptions) + // Handle abort signal + if (signal) { + abortHandler = () => { + tusUpload.abort() + reject(new Error('Upload aborted')) + } + signal.addEventListener('abort', abortHandler, { once: true }) + } + tusUpload.start() }) log(label, 'upload done') } - await pMap(streamLabels, uploadSingleStream, { concurrency: uploadConcurrency }) + await pMap(streamLabels, uploadSingleStream, { concurrency: 
uploadConcurrency, signal }) } diff --git a/test/e2e/cli/OutputCtl.ts b/test/e2e/cli/OutputCtl.ts new file mode 100644 index 00000000..1962c541 --- /dev/null +++ b/test/e2e/cli/OutputCtl.ts @@ -0,0 +1,57 @@ +import type { LogLevelValue, OutputCtlOptions } from '../../../src/cli/OutputCtl.ts' +import { LOG_LEVEL_DEFAULT } from '../../../src/cli/OutputCtl.ts' + +interface OutputEntry { + type: 'error' | 'warn' | 'notice' | 'info' | 'debug' | 'trace' | 'print' + msg: unknown + json?: unknown +} + +/** + * Test version of OutputCtl that captures output for verification + * instead of writing to console. Implements the same interface as src/cli/OutputCtl. + */ +export default class OutputCtl { + private output: OutputEntry[] + // These properties are required by the src/cli/OutputCtl interface but not used in tests + private json: boolean + private logLevel: LogLevelValue + + constructor({ logLevel = LOG_LEVEL_DEFAULT, jsonMode = false }: OutputCtlOptions = {}) { + this.output = [] + this.json = jsonMode + this.logLevel = logLevel + } + + error(msg: unknown): void { + this.output.push({ type: 'error', msg }) + } + + warn(msg: unknown): void { + this.output.push({ type: 'warn', msg }) + } + + notice(msg: unknown): void { + this.output.push({ type: 'notice', msg }) + } + + info(msg: unknown): void { + this.output.push({ type: 'info', msg }) + } + + debug(msg: unknown): void { + this.output.push({ type: 'debug', msg }) + } + + trace(msg: unknown): void { + this.output.push({ type: 'trace', msg }) + } + + print(msg: unknown, json?: unknown): void { + this.output.push({ type: 'print', msg, json }) + } + + get(debug = false): OutputEntry[] { + return this.output.filter((line) => debug || line.type !== 'debug') + } +} diff --git a/test/e2e/cli/assemblies-create.test.ts b/test/e2e/cli/assemblies-create.test.ts new file mode 100644 index 00000000..a675d1f7 --- /dev/null +++ b/test/e2e/cli/assemblies-create.test.ts @@ -0,0 +1,451 @@ +import crypto from 'node:crypto' +import fsp 
from 'node:fs/promises' +import process from 'node:process' +import { promisify } from 'node:util' +import { imageSize } from 'image-size' +import rreaddir from 'recursive-readdir' +import { describe, expect, it } from 'vitest' +import { create as assembliesCreate } from '../../../src/cli/commands/assemblies.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +const rreaddirAsync = promisify(rreaddir) + +describe('assemblies', () => { + describe('create', () => { + const genericImg = 'https://placehold.co/100.jpg' + + async function imgPromise(fname = 'in.jpg'): Promise { + const response = await fetch(genericImg) + if (!response.ok) { + throw new Error(`Failed to fetch image: ${response.status}`) + } + const buffer = Buffer.from(await response.arrayBuffer()) + await fsp.writeFile(fname, buffer) + return fname + } + + const genericSteps = { + resize: { + robot: '/image/resize', + use: ':original', + result: true, + width: 130, + height: 130, + }, + } + + async function stepsPromise( + _fname = 'steps.json', + steps: Record = genericSteps, + ): Promise { + await fsp.writeFile('steps.json', JSON.stringify(steps)) + return 'steps.json' + } + + it( + 'should transcode a file', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out.jpg', + }) + const result = output.get(true) as OutputEntry[] + + expect(result.length).to.be.at.least(3) + const msgs = result.map((r) => r.msg) + expect(msgs).to.include('GOT JOB in.jpg out.jpg') + expect(msgs).to.include('DOWNLOADING') + expect(msgs).to.include('COMPLETED in.jpg out.jpg') + + const imgBuffer = await fsp.readFile('out.jpg') + const dim = imageSize(new Uint8Array(imgBuffer)) + expect(dim).to.have.property('width').that.equals(130) + 
expect(dim).to.have.property('height').that.equals(130) + }), + ) + + it( + 'should download file with correct md5 hash', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + const { results } = await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out-md5.jpg', + }) + + // Get the assembly result to find the expected md5hash + // The results array contains assembly statuses + const assemblyResult = results[0] as { + results?: Record> + } + expect(assemblyResult).to.have.property('results') + const resultSteps = Object.values(assemblyResult.results ?? {}) + expect(resultSteps.length).to.be.greaterThan(0) + const firstResult = resultSteps[0]?.[0] + expect(firstResult).to.have.property('md5hash') + const expectedMd5 = firstResult?.md5hash + + // Calculate md5 of downloaded file + const downloadedBuffer = await fsp.readFile('out-md5.jpg') + const actualMd5 = crypto.createHash('md5').update(downloadedBuffer).digest('hex') + + expect(actualMd5).to.equal(expectedMd5) + }), + ) + + it( + 'should handle multiple inputs', + testCase(async (client) => { + const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + }) + + const outs = await fsp.readdir('out') + expect(outs[0]).to.equal('in1.jpg') + expect(outs[1]).to.equal('in2.jpg') + expect(outs[2]).to.equal('in3.jpg') + expect(outs).to.have.lengthOf(3) + }), + ) + + it( + 'should not output outside outdir', + testCase(async (client) => { + await fsp.mkdir('sub') + process.chdir('sub') + + const infile = await imgPromise('../in.jpg') + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + 
output: 'out', + }) + + const outs = await fsp.readdir('out') + expect(outs[0]).to.equal('in.jpg') + expect(outs).to.have.lengthOf(1) + + const ls = await fsp.readdir('.') + expect(ls).to.not.contain('in.jpg') + }), + ) + + it( + 'should structure output directory correctly', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['1.jpg', 'in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + recursive: true, + steps, + inputs: ['1.jpg', 'in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/1.jpg') + expect(outs).to.include('out/2.jpg') + expect(outs).to.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(3) + }), + ) + + it( + 'should not be recursive by default', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: ['in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/2.jpg') + expect(outs).to.not.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(1) + }), + ) + + it( + 'should be able to handle directories recursively', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + recursive: true, + steps, + inputs: ['in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/2.jpg') + expect(outs).to.include('out/sub/3.jpg') + 
expect(outs).to.have.lengthOf(2) + }), + ) + + it( + 'should detect outdir conflicts', + testCase(async (client) => { + await fsp.mkdir('in') + await Promise.all(['1.jpg', 'in/1.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + try { + await assembliesCreate(output, client, { + steps, + inputs: ['1.jpg', 'in'], + output: 'out', + }) + throw new Error('assembliesCreate didnt err; should have') + } catch (_err) { + const result = output.get() as OutputEntry[] + expect(result[result.length - 1]) + .to.have.property('type') + .that.equals('error') + expect(result[result.length - 1]) + .to.have.nested.property('msg.message') + .that.equals("Output collision between 'in/1.jpg' and '1.jpg'") + } + // Small delay to allow abort signals to propagate and streams to close + await new Promise((resolve) => setTimeout(resolve, 50)) + }), + ) + + it( + 'should not download the result if no output is specified', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: null, + }) + const result = output.get(true) as OutputEntry[] + + // When no output is specified, we might still get debug messages but no actual downloads + const downloadingMsgs = result.filter((line) => String(line.msg) === 'DOWNLOADING') + expect(downloadingMsgs.length).to.be.lessThanOrEqual(1) + }), + ) + + it( + 'should accept invocations with no inputs', + testCase(async (client) => { + await imgPromise() + const steps = await stepsPromise('steps.json', { + import: { + robot: '/http/import', + url: genericImg, + }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 130, + height: 130, + }, + }) + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [], + output: 'out.jpg', + }) + + await fsp.access('out.jpg') 
+ }), + ) + + it( + 'should allow deleting inputs after processing', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: null, + del: true, + }) + + try { + await fsp.access(infile) + throw new Error('File should have been deleted') + } catch (err) { + expect((err as NodeJS.ErrnoException).code).to.equal('ENOENT') + } + }), + ) + + it( + 'should not reprocess inputs that are older than their output', + testCase(async (client) => { + const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output1 = new OutputCtl() + await assembliesCreate(output1, client, { + steps, + inputs: [infiles[0] as string], + output: 'out', + }) + + const output2 = new OutputCtl() + await assembliesCreate(output2, client, { + steps, + inputs: infiles, + output: 'out', + }) + const result = output2.get(true) as OutputEntry[] + + expect( + result.map((line) => line.msg).filter((msg) => String(msg).includes('in1.jpg')), + ).to.have.lengthOf(0) + }), + ) + + it( + 'should process many files with concurrency limiting', + testCase(async (client) => { + // Create 6 input files + const fileCount = 6 + const infiles = await Promise.all( + Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), + ) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + concurrency: 2, // Only process 2 at a time + }) + + // Verify all files were processed successfully + const outs = await fsp.readdir('out') + expect(outs).to.have.lengthOf(fileCount) + + // Analyze debug output to verify concurrency limiting was applied. + // The fixed code emits "PROCESSING JOB" when jobs start (up to concurrency limit). 
+ // The unfixed code has no such limiting - all jobs start at once with "GOT JOB". + const debugOutput = output.get(true) as OutputEntry[] + const messages = debugOutput.map((e) => String(e.msg)) + + // Check that "PROCESSING JOB" messages exist (added by the fix) + const processingMessages = messages.filter((m) => m.startsWith('PROCESSING JOB')) + expect( + processingMessages.length, + 'Expected "PROCESSING JOB" debug messages from concurrency-limited processing', + ).to.be.greaterThan(0) + + // Track max concurrent jobs by counting "PROCESSING JOB" vs "COMPLETED" messages + let activeJobs = 0 + let maxActiveJobs = 0 + for (const msg of messages) { + if (msg.startsWith('PROCESSING JOB')) { + activeJobs++ + maxActiveJobs = Math.max(maxActiveJobs, activeJobs) + } else if (msg.startsWith('COMPLETED')) { + activeJobs-- + } + } + + // With concurrency=2, we should never have more than 2 jobs processing at once + expect( + maxActiveJobs, + 'Max concurrent jobs should not exceed concurrency limit', + ).to.be.at.most(2) + }), + ) + + it( + 'should close streams immediately in single-assembly mode', + testCase(async (client) => { + // Create multiple input files for single-assembly mode + const fileCount = 5 + const infiles = await Promise.all( + Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), + ) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + singleAssembly: true, // All files in one assembly + }) + + // Verify files were processed + const outs = await fsp.readdir('out') + expect(outs.length).to.be.greaterThan(0) + + // Analyze debug output to verify streams were handled properly. + // The fixed code emits "STREAM CLOSED" when closing streams during collection. + // The unfixed code keeps all streams open until upload, risking fd exhaustion. 
+ const debugOutput = output.get(true) as OutputEntry[] + const messages = debugOutput.map((e) => String(e.msg)) + + // Check that streams were closed during collection (added by the fix) + const streamClosedMessages = messages.filter((m) => m.startsWith('STREAM CLOSED')) + expect( + streamClosedMessages.length, + 'Expected "STREAM CLOSED" messages indicating proper fd management', + ).to.be.greaterThan(0) + }), + ) + }) +}) diff --git a/test/e2e/cli/assemblies-list.test.ts b/test/e2e/cli/assemblies-list.test.ts new file mode 100644 index 00000000..48391596 --- /dev/null +++ b/test/e2e/cli/assemblies-list.test.ts @@ -0,0 +1,19 @@ +import { describe, expect, it } from 'vitest' +import * as assemblies from '../../../src/cli/commands/assemblies.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +describe('assemblies', () => { + describe('list', () => { + it( + 'should list assemblies', + testCase(async (client) => { + const output = new OutputCtl() + await assemblies.list(output, client, { pagesize: 1 }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + }), + ) + }) +}) diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts new file mode 100644 index 00000000..0264f2ba --- /dev/null +++ b/test/e2e/cli/assemblies.test.ts @@ -0,0 +1,155 @@ +import fsp from 'node:fs/promises' +import { describe, expect, it } from 'vitest' +import * as assemblies from '../../../src/cli/commands/assemblies.ts' +import { zip } from '../../../src/cli/helpers.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +describe('assemblies', () => { + describe('get', () => { + it( + 'should get assemblies', + testCase(async (client) => { + const response = await client.listAssemblies({ + pagesize: 5, + type: 'completed', + }) + const 
assemblyList = response.items + if (assemblyList.length === 0) throw new Error('account has no assemblies to fetch') + + const expectations = await Promise.all( + assemblyList.map((assembly) => client.getAssembly(assembly.id)), + ) + + const actuals = await Promise.all( + assemblyList.map(async (assembly) => { + const output = new OutputCtl() + await assemblies.get(output, client, { assemblies: [assembly.id] }) + return output.get() as OutputEntry[] + }), + ) + + for (const [expectation, actual] of zip(expectations, actuals)) { + expect(actual).to.have.lengthOf(1) + expect(actual).to.have.nested.property('[0].type').that.equals('print') + expect(actual).to.have.nested.property('[0].json').that.deep.equals(expectation) + } + }), + ) + + it( + 'should return assemblies in the order specified', + testCase(async (client) => { + const response = await client.listAssemblies({ pagesize: 5 }) + const assemblyList = response.items.sort(() => 2 * Math.floor(Math.random() * 2) - 1) + if (assemblyList.length === 0) throw new Error('account has no assemblies to fetch') + + const ids = assemblyList.map((assembly) => assembly.id) + + const output = new OutputCtl() + await assemblies.get(output, client, { assemblies: ids }) + const results = output.get() as OutputEntry[] + + try { + expect(results).to.have.lengthOf(ids.length) + } catch (e) { + console.error('DEBUG: Results:', JSON.stringify(results, null, 2)) + console.error('DEBUG: Ids:', JSON.stringify(ids, null, 2)) + throw e + } + for (const [result, id] of zip(results, ids)) { + expect(result).to.have.property('type').that.equals('print') + expect(result).to.have.nested.property('json.assembly_id').that.equals(id) + } + }), + ) + }) + + describe('delete', () => { + it( + 'should delete assemblies', + testCase(async (client) => { + const assembly = await client.createAssembly({ + params: { + steps: { import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' } }, + }, + }) + + const output = new OutputCtl() + const 
assemblyId = assembly.assembly_id as string + await assemblies.delete(output, client, { assemblies: [assemblyId] }) + const res = await client.getAssembly(assemblyId) + expect(res.ok).to.equal('ASSEMBLY_CANCELED') + }), + ) + }) + + describe('replay', () => { + it( + 'should replay assemblies without steps (uses original)', + testCase(async (client) => { + const assembly = await client.createAssembly({ + params: { + steps: { import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' } }, + }, + }) + + const output = new OutputCtl() + const assemblyId = assembly.assembly_id as string + await assemblies.replay(output, client, { + assemblies: [assemblyId], + steps: undefined, + }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + }), + ) + + it( + 'should replay assemblies with steps override', + testCase(async (client) => { + // Create an assembly with 100x100 resize + const assembly = await client.createAssembly({ + params: { + steps: { + import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 50, + height: 50, + }, + }, + }, + }) + + // Create steps file with different dimensions (80x80) + const overrideSteps = { + import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 80, + height: 80, + }, + } + await fsp.writeFile('override-steps.json', JSON.stringify(overrideSteps)) + + const output = new OutputCtl() + const assemblyId = assembly.assembly_id as string + await assemblies.replay(output, client, { + assemblies: [assemblyId], + steps: 'override-steps.json', + }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + + // Note: We can't easily verify the output dimensions here without downloading, + // but the test verifies the steps file is 
parsed and sent without errors + }), + ) + }) +}) diff --git a/test/e2e/cli/bills.test.ts b/test/e2e/cli/bills.test.ts new file mode 100644 index 00000000..7956637a --- /dev/null +++ b/test/e2e/cli/bills.test.ts @@ -0,0 +1,22 @@ +import { describe, expect, it } from 'vitest' +import * as bills from '../../../src/cli/commands/bills.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +describe('bills', () => { + describe('get', () => { + it( + 'should get bills', + testCase(async (client) => { + const output = new OutputCtl() + const date = new Date() + const month = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}` + await bills.get(output, client, { months: [month] }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + expect(logs.filter((l) => l.type === 'print')).to.have.length.above(0) + }), + ) + }) +}) diff --git a/test/e2e/cli/cli.test.ts b/test/e2e/cli/cli.test.ts new file mode 100644 index 00000000..0b9cfe90 --- /dev/null +++ b/test/e2e/cli/cli.test.ts @@ -0,0 +1,10 @@ +import { describe, expect, it } from 'vitest' +import { runCli } from './test-utils.ts' + +describe('CLI', () => { + it('should list templates via CLI', async () => { + const { stdout, stderr } = await runCli('templates list') + expect(stderr).to.be.empty + expect(stdout).to.match(/[a-f0-9]{32}/) + }) +}) diff --git a/test/e2e/cli/templates.test.ts b/test/e2e/cli/templates.test.ts new file mode 100644 index 00000000..022f1cfc --- /dev/null +++ b/test/e2e/cli/templates.test.ts @@ -0,0 +1,334 @@ +import fsp from 'node:fs/promises' +import path from 'node:path' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import type { TemplateContent } from '../../../src/apiTypes.ts' +import * as templates from '../../../src/cli/commands/templates.ts' +import { zip } from '../../../src/cli/helpers.ts' +import { 
Transloadit as TransloaditClient } from '../../../src/Transloadit.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { authKey, authSecret, delay, testCase } from './test-utils.ts' + +describe('templates', () => { + // Use unique prefix for all template names to avoid conflicts between test runs + const testId = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}` + + describe('create', () => { + it( + 'should create templates', + testCase(async (client) => { + const executions = [1, 2, 3, 4, 5].map(async (n) => { + const output = new OutputCtl() + const steps = { import: { robot: '/http/import', url: `https://example.com/${n}` } } + await fsp.writeFile(`${n}.json`, JSON.stringify(steps)) + await templates.create(output, client, { name: `test-${testId}-${n}`, file: `${n}.json` }) + return output.get() as OutputEntry[] + }) + + const results = await Promise.all(executions) + for (const result of results) { + expect(result).to.have.lengthOf(1) + expect(result).to.have.nested.property('[0].type').that.equals('print') + expect(result).to.have.nested.property('[0].msg').that.equals(result[0]?.json?.id) + + if (result[0]?.json?.id) { + await client.deleteTemplate(result[0].json.id).catch(() => {}) + } + } + }), + ) + }) + + describe('get', () => { + it( + 'should get templates', + testCase(async (client) => { + const response = await client.listTemplates({ pagesize: 5 }) + const templatesList = response.items + if (templatesList.length === 0) throw new Error('account has no templates to fetch') + + const expectations = await Promise.all( + templatesList.map((template) => client.getTemplate(template.id)), + ) + + const actuals = await Promise.all( + templatesList.map(async (template) => { + const output = new OutputCtl() + await templates.get(output, client, { templates: [template.id] }) + return output.get() as OutputEntry[] + }), + ) + + for (const [expectation, actual] of zip(expectations, actuals)) { + 
expect(actual).to.have.lengthOf(1) + expect(actual).to.have.nested.property('[0].type').that.equals('print') + expect(actual).to.have.nested.property('[0].json').that.deep.equals(expectation) + } + }), + ) + + it( + 'should return templates in the order specified', + testCase(async (client) => { + const response = await client.listTemplates({ pagesize: 5 }) + const items = response.items.sort(() => 2 * Math.floor(Math.random() * 2) - 1) + if (items.length === 0) throw new Error('account has no templates to fetch') + + const ids = items.map((template) => template.id) + + const output = new OutputCtl() + await templates.get(output, client, { templates: ids }) + const results = output.get() as OutputEntry[] + + expect(results).to.have.lengthOf(ids.length) + for (const [result, id] of zip(results, ids)) { + expect(result).to.have.property('type').that.equals('print') + expect(result).to.have.nested.property('json.id').that.equals(id) + } + }), + ) + }) + + describe('modify', () => { + let templateId: string + const originalName = `original-name-${testId}` + + beforeAll(async () => { + const client = new TransloaditClient({ authKey, authSecret }) + const response = await client.createTemplate({ + name: originalName, + template: { + steps: { dummy: { robot: '/html/convert', url: 'https://example.com' } }, + } as TemplateContent, + }) + templateId = response.id + }) + + it( + 'should modify but not rename the template', + testCase(async (client) => { + const steps = { import: { robot: '/http/import', url: 'https://example.com/modified' } } + await fsp.writeFile('template.json', JSON.stringify(steps)) + + const output = new OutputCtl() + await templates.modify(output, client, { + template: templateId, + file: 'template.json', + }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await delay(2000) + const template = await client.getTemplate(templateId) + expect(template).to.have.property('name').that.equals(originalName) + 
expect(template).to.have.property('content').that.has.property('steps') + }), + ) + + it( + 'should not modify but rename the template', + testCase(async (client) => { + await fsp.writeFile('template.json', '') + + const newName = `new-name-${testId}` + const output = new OutputCtl() + await templates.modify(output, client, { + template: templateId, + name: newName, + file: 'template.json', + }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await delay(2000) + const template = await client.getTemplate(templateId) + expect(template).to.have.property('name').that.equals(newName) + expect(template).to.have.property('content').that.has.property('steps') + }), + ) + + it( + 'should modify and rename the template', + testCase(async (client) => { + const steps = { import: { robot: '/http/import', url: 'https://example.com/renamed' } } + await fsp.writeFile('template.json', JSON.stringify(steps)) + + const newerName = `newer-name-${testId}` + const output = new OutputCtl() + await templates.modify(output, client, { + template: templateId, + name: newerName, + file: 'template.json', + }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await delay(2000) + const template = await client.getTemplate(templateId) + expect(template).to.have.property('name').that.equals(newerName) + expect(template).to.have.property('content').that.has.property('steps') + }), + ) + + afterAll(async () => { + const client = new TransloaditClient({ authKey, authSecret }) + await client.deleteTemplate(templateId) + }) + }) + + describe('delete', () => { + it( + 'should delete templates', + testCase(async (client) => { + const ids = await Promise.all( + [1, 2, 3, 4, 5].map(async (n) => { + const response = await client.createTemplate({ + name: `delete-test-${testId}-${n}`, + template: { + steps: { dummy: { robot: '/html/convert', url: `https://example.com/${n}` } }, + } as TemplateContent, + }) + return response.id + }), + ) + + const output = new 
OutputCtl() + await templates.delete(output, client, { templates: ids }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await Promise.all( + ids.map(async (id) => { + try { + const response = await client.getTemplate(id) + expect(response).to.not.exist + } catch (err) { + const error = err as { + code?: string + transloaditErrorCode?: string + response?: { body?: { error?: string } } + } + const errorCode = + error.code || error.transloaditErrorCode || error.response?.body?.error + if (errorCode !== 'TEMPLATE_NOT_FOUND') { + console.error('Delete failed with unexpected error:', err, 'Code:', errorCode) + throw err + } + } + }), + ) + }), + ) + }) + + describe('sync', () => { + it( + 'should handle directories recursively', + testCase(async (client) => { + const response = await client.listTemplates({ pagesize: 5 }) + const templateIds = response.items.map((item) => ({ id: item.id, name: item.name })) + + let dirname = 'd' + const files: string[] = [] + for (const { id, name } of templateIds) { + const fname = path.join(dirname, `${name}.json`) + await fsp.mkdir(dirname, { recursive: true }) + await fsp.writeFile(fname, `{"transloadit_template_id":"${id}"}`) + files.push(fname) + dirname = path.join(dirname, 'd') + } + + const output = new OutputCtl() + await templates.sync(output, client, { recursive: true, files: ['d'] }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + const contents = await Promise.all( + files.map( + async (file) => JSON.parse(await fsp.readFile(file, 'utf8')) as Record, + ), + ) + for (const [content, idObj] of zip(contents, templateIds)) { + expect(content).to.have.property('transloadit_template_id').that.equals(idObj.id) + expect(content).to.have.property('steps') + } + }), + ) + + it( + 'should update local files when outdated', + testCase(async (client) => { + const params = { + name: `test-local-update-${testId}`, + template: { + steps: { dummy: { robot: '/html/convert', url: 
'https://example.com/changed' } }, + } as TemplateContent, + } + const response = await client.createTemplate(params) + const id = response.id + + try { + const fname = `${params.name}.json` + await fsp.writeFile( + fname, + JSON.stringify({ + transloadit_template_id: id, + steps: { changed: false }, + }), + ) + await fsp.utimes(fname, 0, 0) + + const output = new OutputCtl() + await templates.sync(output, client, { files: [fname] }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + const content = JSON.parse(await fsp.readFile(fname, 'utf8')) as Record + expect(content).to.have.property('steps') + const fetchedTemplate = await client.getTemplate(id) + expect(fetchedTemplate).to.have.property('content').that.has.property('steps') + } finally { + await client.deleteTemplate(id).catch(() => {}) + } + }), + ) + + it( + 'should update remote template when outdated', + testCase(async (client) => { + const params = { + name: `test-remote-update-${testId}`, + template: { + steps: { dummy: { robot: '/html/convert', url: 'https://example.com/unchanged' } }, + } as TemplateContent, + } + const response = await client.createTemplate(params) + const id = response.id + + try { + const fname = `${params.name}.json` + await fsp.writeFile( + fname, + JSON.stringify({ + transloadit_template_id: id, + steps: { changed: true }, + }), + ) + await fsp.utimes(fname, Date.now() * 2, Date.now() * 2) + + const output = new OutputCtl() + await templates.sync(output, client, { files: [fname] }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + const content = JSON.parse(await fsp.readFile(fname, 'utf8')) as Record + expect(content).to.have.property('steps') + const fetchedTemplate = await client.getTemplate(id) + expect(fetchedTemplate).to.have.property('content').that.has.property('steps') + } finally { + await client.deleteTemplate(id).catch(() => {}) + } + }), + ) + }) +}) diff --git a/test/e2e/cli/test-utils.ts b/test/e2e/cli/test-utils.ts new 
file mode 100644 index 00000000..799cd2e5 --- /dev/null +++ b/test/e2e/cli/test-utils.ts @@ -0,0 +1,70 @@ +import { exec } from 'node:child_process' +import fsp from 'node:fs/promises' +import path from 'node:path' +import process from 'node:process' +import { fileURLToPath } from 'node:url' +import { promisify } from 'node:util' +import { rimraf } from 'rimraf' +import 'dotenv/config' +import { Transloadit as TransloaditClient } from '../../../src/Transloadit.ts' + +export const execAsync = promisify(exec) + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) +export const cliPath = path.resolve(__dirname, '../../../src/cli.ts') + +export const tmpDir = '/tmp' + +if (!process.env.TRANSLOADIT_KEY || !process.env.TRANSLOADIT_SECRET) { + console.error( + 'Please provide environment variables TRANSLOADIT_KEY and TRANSLOADIT_SECRET to run tests', + ) + process.exit(1) +} + +export const authKey = process.env.TRANSLOADIT_KEY +export const authSecret = process.env.TRANSLOADIT_SECRET + +process.setMaxListeners(Number.POSITIVE_INFINITY) + +export function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)) +} + +export interface OutputEntry { + type: string + msg: unknown + json?: { id?: string; assembly_id?: string } & Record +} + +export function testCase(cb: (client: TransloaditClient) => Promise): () => Promise { + const cwd = process.cwd() + return async () => { + const dirname = path.join( + tmpDir, + `transloadit_test-${Date.now()}-${Math.floor(Math.random() * 10000)}`, + ) + const client = new TransloaditClient({ authKey, authSecret }) + try { + await fsp.mkdir(dirname) + process.chdir(dirname) + return await cb(client) + } finally { + process.chdir(cwd) + await rimraf(dirname) + } + } +} + +export function runCli( + args: string, + env: Record = {}, +): Promise<{ stdout: string; stderr: string }> { + return execAsync(`npx tsx ${cliPath} ${args}`, { + env: { ...process.env, ...env }, + }) +} + +export function 
createClient(): TransloaditClient { + return new TransloaditClient({ authKey, authSecret }) +} diff --git a/test/integration/fixtures/zerobytes.jpg b/test/e2e/fixtures/zerobytes.jpg similarity index 100% rename from test/integration/fixtures/zerobytes.jpg rename to test/e2e/fixtures/zerobytes.jpg diff --git a/test/integration/live-api.test.ts b/test/e2e/live-api.test.ts similarity index 97% rename from test/integration/live-api.test.ts rename to test/e2e/live-api.test.ts index 817d41d3..b1a13f5e 100644 --- a/test/integration/live-api.test.ts +++ b/test/e2e/live-api.test.ts @@ -12,13 +12,14 @@ import intoStream from 'into-stream' import * as temp from 'temp' import type { InterpolatableRobotFileFilterInstructionsInput } from '../../src/alphalib/types/robots/file-filter.ts' import type { InterpolatableRobotImageResizeInstructionsInput } from '../../src/alphalib/types/robots/image-resize.ts' -import { - type CreateAssemblyOptions, - type CreateAssemblyParams, - Transloadit, - type UploadProgress, +import type { + CreateAssemblyOptions, + CreateAssemblyParams, + UploadProgress, } from '../../src/Transloadit.ts' -import { createTestServer, type TestServer } from '../testserver.ts' +import { Transloadit } from '../../src/Transloadit.ts' +import type { TestServer } from '../testserver.ts' +import { createTestServer } from '../testserver.ts' import { createProxy } from '../util.ts' // Load environment variables from .env file @@ -154,7 +155,7 @@ interface VirtualTestServer { url: string } -async function createVirtualTestServer(handler: RequestListener): Promise { +function createVirtualTestServer(handler: RequestListener): VirtualTestServer { const id = randomUUID() log('Adding virtual server handler', id) const url = `${testServer.url}/${id}` @@ -644,7 +645,7 @@ describe('API integration', { timeout: 60000, retry: 1 }, () => { it('should send a notification upon assembly completion', async () => { await new Promise((resolve, reject) => { - const onNotification: 
OnNotification = async ({ path }) => { + const onNotification: OnNotification = ({ path }) => { try { expect(path).toBe('/') resolve() @@ -722,7 +723,10 @@ describe('API integration', { timeout: 60000, retry: 1 }, () => { }) it('should allow creating a template', async () => { - const template = await client.createTemplate({ name: templName, template: genericParams }) + const template = await client.createTemplate({ + name: templName, + template: { steps: genericParams.steps }, + }) templId = template.id }) @@ -732,7 +736,7 @@ describe('API integration', { timeout: 60000, retry: 1 }, () => { const template = await client.getTemplate(nn(templId, 'templId')) const { name, content } = template expect(name).toBe(templName) - expect(content).toEqual(genericParams) + expect(content).toEqual({ steps: genericParams.steps }) }) it('should allow editing a template', async () => { diff --git a/test/testserver.ts b/test/testserver.ts index efe23023..5368c5ad 100644 --- a/test/testserver.ts +++ b/test/testserver.ts @@ -1,9 +1,10 @@ -import { createServer, type RequestListener, type Server } from 'node:http' +import type { RequestListener, Server } from 'node:http' +import { createServer } from 'node:http' import { setTimeout } from 'node:timers/promises' import debug from 'debug' import got from 'got' - -import { type CreateTunnelResult, createTunnel } from './tunnel.ts' +import type { CreateTunnelResult } from './tunnel.ts' +import { createTunnel } from './tunnel.ts' const log = debug('transloadit:testserver') @@ -12,7 +13,7 @@ interface HttpServer { port: number } -async function createHttpServer(handler: RequestListener): Promise { +function createHttpServer(handler: RequestListener): Promise { return new Promise((resolve, reject) => { const server = createServer(handler) diff --git a/test/tunnel.ts b/test/tunnel.ts index b52f1e0a..41c6327a 100644 --- a/test/tunnel.ts +++ b/test/tunnel.ts @@ -2,7 +2,8 @@ import { Resolver } from 'node:dns/promises' import { createInterface } 
from 'node:readline' import * as timers from 'node:timers/promises' import debug from 'debug' -import { ExecaError, execa, type ResultPromise } from 'execa' +import type { ResultPromise } from 'execa' +import { ExecaError, execa } from 'execa' import pRetry from 'p-retry' const log = debug('transloadit:cloudflared-tunnel') @@ -54,6 +55,7 @@ async function startTunnel({ cloudFlaredPath, port }: CreateTunnelParams) { 'failed to sufficiently increase receive buffer size', 'update check failed error', 'failed to parse quick Tunnel ID', + 'failed to unmarshal quick Tunnel', // Transient Cloudflare API JSON parsing error ] rl.on('line', (line) => { diff --git a/test/unit/test-cli.test.ts b/test/unit/cli/test-cli.test.ts similarity index 95% rename from test/unit/test-cli.test.ts rename to test/unit/cli/test-cli.test.ts index 9dd7f3d9..f630cb20 100644 --- a/test/unit/test-cli.test.ts +++ b/test/unit/cli/test-cli.test.ts @@ -3,10 +3,9 @@ import { tmpdir } from 'node:os' import path from 'node:path' import { fileURLToPath } from 'node:url' import { afterEach, describe, expect, it, vi } from 'vitest' -import * as cli from '../../src/cli.ts' -import { Transloadit } from '../../src/Transloadit.ts' - -const { main, runSig, runSmartSig, shouldRunCli } = cli +import { runSig, runSmartSig } from '../../../src/cli/commands/auth.ts' +import { main, shouldRunCli } from '../../../src/cli.ts' +import { Transloadit } from '../../../src/Transloadit.ts' const resetExitCode = () => { process.exitCode = undefined @@ -21,7 +20,7 @@ afterEach(() => { describe('cli smart_sig', () => { it('recognizes symlinked invocation paths', () => { const tmpDir = mkdtempSync(path.join(tmpdir(), 'transloadit-cli-')) - const symlinkTarget = fileURLToPath(new URL('../../src/cli.ts', import.meta.url)) + const symlinkTarget = fileURLToPath(new URL('../../../src/cli.ts', import.meta.url)) const symlinkPath = path.join(tmpDir, 'transloadit') symlinkSync(symlinkTarget, symlinkPath) @@ -358,17 +357,13 @@ 
describe('cli sig', () => { }) describe('cli help', () => { - it('prints usage when no command is provided', async () => { + it('prints usage when --help is provided', async () => { const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) - const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) - await main([]) + await main(['--help']) - expect(stderrSpy).not.toHaveBeenCalled() expect(stdoutSpy).toHaveBeenCalled() - const message = `${stdoutSpy.mock.calls[0]?.[0]}` - expect(message).toContain('npx transloadit smart_sig') - expect(message).toContain('npx transloadit sig') - expect(process.exitCode).toBe(1) + const message = stdoutSpy.mock.calls.map((call) => `${call[0]}`).join('') + expect(message).toContain('Transloadit CLI') }) }) diff --git a/test/unit/mock-http.test.ts b/test/unit/mock-http.test.ts index d11a0c24..9f572c18 100644 --- a/test/unit/mock-http.test.ts +++ b/test/unit/mock-http.test.ts @@ -1,12 +1,10 @@ import { inspect } from 'node:util' import nock from 'nock' - +import type { AssemblyStatus, Options } from '../../src/Transloadit.ts' import { ApiError, - type AssemblyStatus, assemblyInstructionsSchema, InconsistentResponseError, - type Options, TimeoutError, Transloadit, } from '../../src/Transloadit.ts' @@ -56,6 +54,54 @@ describe('Mocked API tests', () => { scope.done() }) + it('should honor abort signal during awaitAssemblyCompletion polling', async () => { + const client = getLocalClient() + + // Set up a mock that keeps returning ASSEMBLY_EXECUTING (never completes) + const scope = nock('http://localhost') + .get('/assemblies/1') + .query(() => true) + .reply(200, { ok: 'ASSEMBLY_EXECUTING', assembly_url: '', assembly_ssl_url: '' }) + .persist() // Keep responding with same status + + const controller = new AbortController() + + // Abort after 50ms + setTimeout(() => controller.abort(), 50) + + await expect( + client.awaitAssemblyCompletion('1', { interval: 10, signal: controller.signal }), + 
).rejects.toThrow(expect.objectContaining({ name: 'AbortError' })) + + scope.persist(false) + }) + + it('should stop polling early when onPoll returns false', async () => { + const client = getLocalClient() + + let pollCount = 0 + const scope = nock('http://localhost') + .get('/assemblies/1') + .query(() => true) + .reply(200, { ok: 'ASSEMBLY_EXECUTING', assembly_url: '', assembly_ssl_url: '' }) + .persist() + + const result = await client.awaitAssemblyCompletion('1', { + interval: 10, + onPoll: () => { + pollCount++ + // Stop after 3 polls + return pollCount < 3 + }, + }) + + // Should have the last polled status (ASSEMBLY_EXECUTING), not completed + expect((result as { ok: string }).ok).toBe('ASSEMBLY_EXECUTING') + expect(pollCount).toBe(3) + + scope.persist(false) + }) + it('should handle aborted correctly', async () => { const client = getLocalClient() @@ -157,30 +203,30 @@ describe('Mocked API tests', () => { // console.log(inspect(errorString)) expect(inspect(errorString).split('\n')).toEqual([ expect.stringMatching( - `API error \\(HTTP 400\\) INVALID_FILE_META_DATA: Invalid file metadata https://api2-oltu.transloadit.com/assemblies/foo`, + 'API error \\(HTTP 400\\) INVALID_FILE_META_DATA: Invalid file metadata https://api2-oltu.transloadit.com/assemblies/foo', ), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), expect.stringMatching( - ` at createAssemblyAndUpload \\(.+\\/src\\/Transloadit\\.ts:\\d+:\\d+\\)`, + ' at createAssemblyAndUpload \\(.+\\/src\\/Transloadit\\.ts:\\d+:\\d+\\)', ), - expect.stringMatching(` at .+\\/test\\/unit\\/mock-http\\.test\\.ts:\\d+:\\d+`), - expect.stringMatching(` at .+`), + expect.stringMatching(' at .+\\/test\\/unit\\/mock-http\\.test\\.ts:\\d+:\\d+'), + expect.stringMatching(' at .+'), expect.stringMatching(` code: 'INVALID_FILE_META_DATA',`), expect.stringMatching(` rawMessage: 'Invalid file metadata',`), - expect.stringMatching(` 
reason: undefined,`), + expect.stringMatching(' reason: undefined,'), expect.stringMatching( ` assemblySslUrl: 'https:\\/\\/api2-oltu\\.transloadit\\.com\\/assemblies\\/foo'`, ), expect.stringMatching(` assemblyId: '123',`), - expect.stringMatching(` cause: HTTPError: Response code 400 \\(Bad Request\\)`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` input: undefined,`), + expect.stringMatching(' cause: HTTPError: Response code 400 \\(Bad Request\\)'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' input: undefined,'), expect.stringMatching(` code: 'ERR_NON_2XX_3XX_RESPONSE',`), expect.stringMatching(' \\[cause\\]: {}'), expect.stringMatching(' }'), diff --git a/test/unit/test-transloadit-client.test.ts b/test/unit/test-transloadit-client.test.ts index d5f57159..0fc8f228 100644 --- a/test/unit/test-transloadit-client.test.ts +++ b/test/unit/test-transloadit-client.test.ts @@ -1,4 +1,5 @@ -import { PassThrough, type Readable } from 'node:stream' +import type { Readable } from 'node:stream' +import { PassThrough } from 'node:stream' import FormData from 'form-data' import got, { type CancelableRequest } from 'got' diff --git a/test/unit/transloadit-advanced.test.ts b/test/unit/transloadit-advanced.test.ts index 64b4c1c9..712db296 100644 --- a/test/unit/transloadit-advanced.test.ts +++ b/test/unit/transloadit-advanced.test.ts @@ -151,22 +151,22 @@ describe('Transloadit advanced behaviors', () => { it('streams assemblies page by page until all items are read', async () => { type ListAssembliesReturn = Awaited> - const listAssemblies = vi.spyOn(client, 'listAssemblies').mockImplementation(async 
(params) => { + const listAssemblies = vi.spyOn(client, 'listAssemblies').mockImplementation((params) => { const page = params?.page ?? 1 if (page === 1) { - return { + return Promise.resolve({ items: [{ id: 1 }, { id: 2 }], count: 3, - } as unknown as ListAssembliesReturn + } as unknown as ListAssembliesReturn) } if (page === 2) { - return { + return Promise.resolve({ items: [{ id: 3 }], count: 3, - } as unknown as ListAssembliesReturn + } as unknown as ListAssembliesReturn) } - return { items: [], count: 3 } as unknown as ListAssembliesReturn + return Promise.resolve({ items: [], count: 3 } as unknown as ListAssembliesReturn) }) const stream = client.streamAssemblies({ page: 1 } as never) diff --git a/test/unit/tus.test.ts b/test/unit/tus.test.ts index a96fc547..c98623fc 100644 --- a/test/unit/tus.test.ts +++ b/test/unit/tus.test.ts @@ -84,9 +84,9 @@ describe('sendTusRequest', () => { [secondPath]: 2048, } - statMock.mockImplementation(async (path: StatPathArg) => { + statMock.mockImplementation((path: StatPathArg) => { const key = typeof path === 'string' ? path : path.toString() - return createStatResult(sizesByPath[key] ?? 0) + return Promise.resolve(createStatResult(sizesByPath[key] ?? 0)) }) const onProgress = vi.fn() @@ -155,9 +155,9 @@ describe('sendTusRequest', () => { [secondPath]: 100, } - statMock.mockImplementation(async (path: StatPathArg) => { + statMock.mockImplementation((path: StatPathArg) => { const key = typeof path === 'string' ? path : path.toString() - return createStatResult(sizesByPath[key] ?? 0) + return Promise.resolve(createStatResult(sizesByPath[key] ?? 
0)) }) const onProgress = vi.fn() diff --git a/test/util.ts b/test/util.ts index 1a4c4197..ed71d0f8 100644 --- a/test/util.ts +++ b/test/util.ts @@ -1,4 +1,5 @@ -import { RequestError, type Transloadit } from '../src/Transloadit.ts' +import type { Transloadit } from '../src/Transloadit.ts' +import { RequestError } from '../src/Transloadit.ts' export const createProxy = (transloaditInstance: Transloadit) => { return new Proxy(transloaditInstance, { diff --git a/vitest.config.ts b/vitest.config.ts index 7c34317f..36c3dcae 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -4,7 +4,7 @@ export default defineConfig({ test: { coverage: { include: ['src/**/*.ts'], - exclude: ['**/*.d.ts', '**/*.test.ts', '**/test/**', '**/alphalib/**'], + exclude: ['**/*.d.ts', '**/*.test.ts', '**/test/**', '**/alphalib/**', '**/cli/**'], reporter: ['json', 'lcov', 'text', 'clover', 'json-summary', 'html'], provider: 'v8', thresholds: { @@ -17,5 +17,12 @@ export default defineConfig({ }, }, globals: true, + testTimeout: 100000, + exclude: [ + '**/node_modules/**', + '**/dist/**', + 'test/e2e/cli/test-utils.ts', + 'test/e2e/cli/OutputCtl.ts', + ], }, }) diff --git a/yarn.lock b/yarn.lock index f42faea1..608edba8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1012,6 +1012,22 @@ __metadata: languageName: node linkType: hard +"@isaacs/balanced-match@npm:^4.0.1": + version: 4.0.1 + resolution: "@isaacs/balanced-match@npm:4.0.1" + checksum: 10c0/7da011805b259ec5c955f01cee903da72ad97c5e6f01ca96197267d3f33103d5b2f8a1af192140f3aa64526c593c8d098ae366c2b11f7f17645d12387c2fd420 + languageName: node + linkType: hard + +"@isaacs/brace-expansion@npm:^5.0.0": + version: 5.0.0 + resolution: "@isaacs/brace-expansion@npm:5.0.0" + dependencies: + "@isaacs/balanced-match": "npm:^4.0.1" + checksum: 10c0/b4d4812f4be53afc2c5b6c545001ff7a4659af68d4484804e9d514e183d20269bb81def8682c01a22b17c4d6aed14292c8494f7d2ac664e547101c1a905aa977 + languageName: node + linkType: hard + "@isaacs/cliui@npm:^8.0.2": version: 
8.0.2 resolution: "@isaacs/cliui@npm:8.0.2" @@ -1991,6 +2007,15 @@ __metadata: languageName: node linkType: hard +"@types/recursive-readdir@npm:^2.2.4": + version: 2.2.4 + resolution: "@types/recursive-readdir@npm:2.2.4" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/089f1a66595587e62cd6464748d34bfc607e7122dc36d9747e0e8c6a690d4319d9c1427e3c8f60c0ab820ce035f4c8623887f8e5f6075582364d7a3c0cb1d004 + languageName: node + linkType: hard + "@types/temp@npm:^0.9.4": version: 0.9.4 resolution: "@types/temp@npm:0.9.4" @@ -2432,6 +2457,17 @@ __metadata: languageName: node linkType: hard +"clipanion@npm:^4.0.0-rc.4": + version: 4.0.0-rc.4 + resolution: "clipanion@npm:4.0.0-rc.4" + dependencies: + typanion: "npm:^3.8.0" + peerDependencies: + typanion: "*" + checksum: 10c0/047b415b59a5e9777d00690fba563ccc850eca6bf27790a88d1deea3ecc8a89840ae9aed554ff284cc698a9f3f20256e43c25ff4a7c4c90a71e5e7d9dca61dd1 + languageName: node + linkType: hard + "color-convert@npm:^0.5.2": version: 0.5.3 resolution: "color-convert@npm:0.5.3" @@ -2643,10 +2679,10 @@ __metadata: languageName: node linkType: hard -"dotenv@npm:^17.2.2": - version: 17.2.2 - resolution: "dotenv@npm:17.2.2" - checksum: 10c0/be66513504590aff6eccb14167625aed9bd42ce80547f4fe5d195860211971a7060949b57108dfaeaf90658f79e40edccd3f233f0a978bff507b5b1565ae162b +"dotenv@npm:^17.2.3": + version: 17.2.3 + resolution: "dotenv@npm:17.2.3" + checksum: 10c0/c884403209f713214a1b64d4d1defa4934c2aa5b0002f5a670ae298a51e3c3ad3ba79dfee2f8df49f01ae74290fcd9acdb1ab1d09c7bfb42b539036108bb2ba0 languageName: node linkType: hard @@ -2931,6 +2967,13 @@ __metadata: languageName: node linkType: hard +"eventemitter3@npm:^5.0.1": + version: 5.0.1 + resolution: "eventemitter3@npm:5.0.1" + checksum: 10c0/4ba5c00c506e6c786b4d6262cfbce90ddc14c10d4667e5c83ae993c9de88aa856033994dd2b35b83e8dc1170e224e66a319fa80adc4c32adcd2379bbc75da814 + languageName: node + linkType: hard + "execa@npm:9.6.0": version: 9.6.0 resolution: "execa@npm:9.6.0" @@ -3173,6 
+3216,17 @@ __metadata: languageName: node linkType: hard +"glob@npm:^13.0.0": + version: 13.0.0 + resolution: "glob@npm:13.0.0" + dependencies: + minimatch: "npm:^10.1.1" + minipass: "npm:^7.1.2" + path-scurry: "npm:^2.0.0" + checksum: 10c0/8e2f5821f3f7c312dd102e23a15b80c79e0837a9872784293ba2e15ec73b3f3749a49a42a31bfcb4e52c84820a474e92331c2eebf18819d20308f5c33876630a + languageName: node + linkType: hard + "glob@npm:^7.1.3": version: 7.2.3 resolution: "glob@npm:7.2.3" @@ -3361,6 +3415,15 @@ __metadata: languageName: node linkType: hard +"image-size@npm:^2.0.2": + version: 2.0.2 + resolution: "image-size@npm:2.0.2" + bin: + image-size: bin/image-size.js + checksum: 10c0/f09dd0f7cf8511cd20e4f756bdb5a7cb6d2240de3323f41bde266bed8373392a293892bf12e907e2995f52833fd88dd27cf6b1a52ab93968afc716cb78cd7b79 + languageName: node + linkType: hard + "imurmurhash@npm:^0.1.4": version: 0.1.4 resolution: "imurmurhash@npm:0.1.4" @@ -3894,6 +3957,13 @@ __metadata: languageName: node linkType: hard +"lru-cache@npm:^11.0.0": + version: 11.2.4 + resolution: "lru-cache@npm:11.2.4" + checksum: 10c0/4a24f9b17537619f9144d7b8e42cd5a225efdfd7076ebe7b5e7dc02b860a818455201e67fbf000765233fe7e339d3c8229fc815e9b58ee6ede511e07608c19b2 + languageName: node + linkType: hard + "magic-string@npm:^0.30.17": version: 0.30.17 resolution: "magic-string@npm:0.30.17" @@ -3986,7 +4056,16 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:^3.0.4, minimatch@npm:^3.1.1": +"minimatch@npm:^10.1.1": + version: 10.1.1 + resolution: "minimatch@npm:10.1.1" + dependencies: + "@isaacs/brace-expansion": "npm:^5.0.0" + checksum: 10c0/c85d44821c71973d636091fddbfbffe62370f5ee3caf0241c5b60c18cd289e916200acb2361b7e987558cd06896d153e25d505db9fc1e43e6b4b6752e2702902 + languageName: node + linkType: hard + +"minimatch@npm:^3.0.4, minimatch@npm:^3.0.5, minimatch@npm:^3.1.1": version: 3.1.2 resolution: "minimatch@npm:3.1.2" dependencies: @@ -4168,6 +4247,13 @@ __metadata: languageName: node linkType: hard 
+"node-watch@npm:^0.7.4": + version: 0.7.4 + resolution: "node-watch@npm:0.7.4" + checksum: 10c0/05c3e66e7b5013d64c31a6dd96b55d87c14c8c0515d05d73554d706a1f8b962fe31781dce74740db29c0ec7c9a1f33a6bac07ef1e8aecc0d38c5ab4eef4c7ac0 + languageName: node + linkType: hard + "nopt@npm:^8.0.0": version: 8.1.0 resolution: "nopt@npm:8.1.0" @@ -4298,6 +4384,16 @@ __metadata: languageName: node linkType: hard +"p-queue@npm:^9.0.1": + version: 9.0.1 + resolution: "p-queue@npm:9.0.1" + dependencies: + eventemitter3: "npm:^5.0.1" + p-timeout: "npm:^7.0.0" + checksum: 10c0/912bd0c09ec910e6851973757afd23b80e12210948b4e235ebaa752e32e782d7664438c948cd343e2620cf26340665ccdaa2715e4e3c52cf02fe11c5152c225f + languageName: node + linkType: hard + "p-retry@npm:^7.0.0": version: 7.0.0 resolution: "p-retry@npm:7.0.0" @@ -4307,7 +4403,14 @@ __metadata: languageName: node linkType: hard -"package-json-from-dist@npm:^1.0.0": +"p-timeout@npm:^7.0.0": + version: 7.0.1 + resolution: "p-timeout@npm:7.0.1" + checksum: 10c0/87d96529d1096d506607218dba6f9ec077c6dbedd0c2e2788c748e33bcd05faae8a81009fd9d22ec0b3c95fc83f4717306baba223f6e464737d8b99294c3e863 + languageName: node + linkType: hard + +"package-json-from-dist@npm:^1.0.0, package-json-from-dist@npm:^1.0.1": version: 1.0.1 resolution: "package-json-from-dist@npm:1.0.1" checksum: 10c0/62ba2785eb655fec084a257af34dbe24292ab74516d6aecef97ef72d4897310bc6898f6c85b5cd22770eaa1ce60d55a0230e150fb6a966e3ecd6c511e23d164b @@ -4376,6 +4479,16 @@ __metadata: languageName: node linkType: hard +"path-scurry@npm:^2.0.0": + version: 2.0.1 + resolution: "path-scurry@npm:2.0.1" + dependencies: + lru-cache: "npm:^11.0.0" + minipass: "npm:^7.1.2" + checksum: 10c0/2a16ed0e81fbc43513e245aa5763354e25e787dab0d539581a6c3f0f967461a159ed6236b2559de23aa5b88e7dc32b469b6c47568833dd142a4b24b4f5cd2620 + languageName: node + linkType: hard + "path-type@npm:^3.0.0": version: 3.0.0 resolution: "path-type@npm:3.0.0" @@ -4516,6 +4629,15 @@ __metadata: languageName: node linkType: hard 
+"recursive-readdir@npm:^2.2.3": + version: 2.2.3 + resolution: "recursive-readdir@npm:2.2.3" + dependencies: + minimatch: "npm:^3.0.5" + checksum: 10c0/d0238f137b03af9cd645e1e0b40ae78b6cda13846e3ca57f626fcb58a66c79ae018a10e926b13b3a460f1285acc946a4e512ea8daa2e35df4b76a105709930d1 + languageName: node + linkType: hard + "reflect.getprototypeof@npm:^1.0.6, reflect.getprototypeof@npm:^1.0.9": version: 1.0.10 resolution: "reflect.getprototypeof@npm:1.0.10" @@ -4609,6 +4731,18 @@ __metadata: languageName: node linkType: hard +"rimraf@npm:^6.1.2": + version: 6.1.2 + resolution: "rimraf@npm:6.1.2" + dependencies: + glob: "npm:^13.0.0" + package-json-from-dist: "npm:^1.0.1" + bin: + rimraf: dist/esm/bin.mjs + checksum: 10c0/c11a6a6fad937ada03c12fe688860690df8296d7cd08dbe59e3cc087f44e43573ae26ecbe48e54cb7a6db745b8c81fe5a15b9359233cc21d52d9b5b3330fcc74 + languageName: node + linkType: hard + "rimraf@npm:~2.6.2": version: 2.6.3 resolution: "rimraf@npm:2.6.3" @@ -5242,20 +5376,27 @@ __metadata: "@biomejs/biome": "npm:^2.2.4" "@transloadit/sev-logger": "npm:^0.0.15" "@types/debug": "npm:^4.1.12" + "@types/recursive-readdir": "npm:^2.2.4" "@types/temp": "npm:^0.9.4" "@vitest/coverage-v8": "npm:^3.2.4" badge-maker: "npm:^5.0.2" + clipanion: "npm:^4.0.0-rc.4" debug: "npm:^4.4.3" - dotenv: "npm:^17.2.2" + dotenv: "npm:^17.2.3" execa: "npm:9.6.0" form-data: "npm:^4.0.4" got: "npm:14.4.9" + image-size: "npm:^2.0.2" into-stream: "npm:^9.0.0" is-stream: "npm:^4.0.1" nock: "npm:^14.0.10" + node-watch: "npm:^0.7.4" npm-run-all: "npm:^4.1.5" p-map: "npm:^7.0.3" + p-queue: "npm:^9.0.1" p-retry: "npm:^7.0.0" + recursive-readdir: "npm:^2.2.3" + rimraf: "npm:^6.1.2" temp: "npm:^0.9.4" tsx: "npm:4.20.5" tus-js-client: "npm:^4.3.1" @@ -5306,6 +5447,13 @@ __metadata: languageName: node linkType: hard +"typanion@npm:^3.8.0": + version: 3.14.0 + resolution: "typanion@npm:3.14.0" + checksum: 
10c0/8b03b19844e6955bfd906c31dc781bae6d7f1fb3ce4fe24b7501557013d4889ae5cefe671dafe98d87ead0adceb8afcb8bc16df7dc0bd2b7331bac96f3a7cae2 + languageName: node + linkType: hard + "type-fest@npm:^4.26.1, type-fest@npm:^4.41.0": version: 4.41.0 resolution: "type-fest@npm:4.41.0"