From 8c4af3228f5d7e39faa5fa4d536627e9e76e35e7 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 10:31:36 +0100 Subject: [PATCH 01/45] first stab at merging in transloadify --- package.json | 9 +- src/cli.ts | 317 +------------ src/cli/JobsPromise.ts | 27 ++ src/cli/OutputCtl.ts | 56 +++ src/cli/assemblies-create.ts | 746 ++++++++++++++++++++++++++++++ src/cli/assemblies.ts | 151 ++++++ src/cli/bills.ts | 36 ++ src/cli/commands/BaseCommand.ts | 73 +++ src/cli/commands/assemblies.ts | 279 +++++++++++ src/cli/commands/auth.ts | 443 ++++++++++++++++++ src/cli/commands/bills.ts | 52 +++ src/cli/commands/index.ts | 64 +++ src/cli/commands/notifications.ts | 89 ++++ src/cli/commands/templates.ts | 226 +++++++++ src/cli/helpers.ts | 60 +++ src/cli/notifications.ts | 36 ++ src/cli/template-last-modified.ts | 156 +++++++ src/cli/templates.ts | 342 ++++++++++++++ src/cli/types.ts | 176 +++++++ test/e2e/OutputCtl.ts | 48 ++ test/e2e/assemblies.test.ts | 437 +++++++++++++++++ test/e2e/bills.test.ts | 22 + test/e2e/cli.test.ts | 10 + test/e2e/notifications.test.ts | 20 + test/e2e/templates.test.ts | 325 +++++++++++++ test/e2e/test-utils.ts | 70 +++ test/unit/test-cli.test.ts | 17 +- vitest.config.ts | 4 +- yarn.lock | 137 +++++- 29 files changed, 4103 insertions(+), 325 deletions(-) create mode 100644 src/cli/JobsPromise.ts create mode 100644 src/cli/OutputCtl.ts create mode 100644 src/cli/assemblies-create.ts create mode 100644 src/cli/assemblies.ts create mode 100644 src/cli/bills.ts create mode 100644 src/cli/commands/BaseCommand.ts create mode 100644 src/cli/commands/assemblies.ts create mode 100644 src/cli/commands/auth.ts create mode 100644 src/cli/commands/bills.ts create mode 100644 src/cli/commands/index.ts create mode 100644 src/cli/commands/notifications.ts create mode 100644 src/cli/commands/templates.ts create mode 100644 src/cli/helpers.ts create mode 100644 src/cli/notifications.ts create mode 100644 src/cli/template-last-modified.ts create 
mode 100644 src/cli/templates.ts create mode 100644 src/cli/types.ts create mode 100644 test/e2e/OutputCtl.ts create mode 100644 test/e2e/assemblies.test.ts create mode 100644 test/e2e/bills.test.ts create mode 100644 test/e2e/cli.test.ts create mode 100644 test/e2e/notifications.test.ts create mode 100644 test/e2e/templates.test.ts create mode 100644 test/e2e/test-utils.ts diff --git a/package.json b/package.json index 9d623b8c..fd0911ab 100644 --- a/package.json +++ b/package.json @@ -20,12 +20,15 @@ "@aws-sdk/client-s3": "^3.891.0", "@aws-sdk/s3-request-presigner": "^3.891.0", "@transloadit/sev-logger": "^0.0.15", + "clipanion": "^4.0.0-rc.4", "debug": "^4.4.3", "form-data": "^4.0.4", "got": "14.4.9", "into-stream": "^9.0.0", "is-stream": "^4.0.1", + "node-watch": "^0.7.4", "p-map": "^7.0.3", + "recursive-readdir": "^2.2.3", "tus-js-client": "^4.3.1", "type-fest": "^4.41.0", "zod": "3.25.76" @@ -33,14 +36,17 @@ "devDependencies": { "@biomejs/biome": "^2.2.4", "@types/debug": "^4.1.12", + "@types/recursive-readdir": "^2.2.4", "@types/temp": "^0.9.4", "@vitest/coverage-v8": "^3.2.4", "badge-maker": "^5.0.2", - "dotenv": "^17.2.2", + "dotenv": "^17.2.3", "execa": "9.6.0", + "image-size": "^2.0.2", "nock": "^14.0.10", "npm-run-all": "^4.1.5", "p-retry": "^7.0.0", + "rimraf": "^6.1.2", "temp": "^0.9.4", "tsx": "4.20.5", "typescript": "5.9.2", @@ -65,6 +71,7 @@ "prepack": "rm -f tsconfig.tsbuildinfo tsconfig.build.tsbuildinfo && tsc --build tsconfig.build.json", "test:unit": "vitest run --coverage ./test/unit", "test:integration": "vitest run ./test/integration", + "test:e2e": "vitest run ./test/e2e", "test:all": "vitest run --coverage", "test": "yarn test:unit" }, diff --git a/src/cli.ts b/src/cli.ts index dc084ed8..bdcd0b93 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -4,310 +4,8 @@ import { realpathSync } from 'node:fs' import path from 'node:path' import process from 'node:process' import { fileURLToPath } from 'node:url' -import { type ZodIssue, z } from 'zod' 
-import { - assemblyAuthInstructionsSchema, - assemblyInstructionsSchema, -} from './alphalib/types/template.ts' -import type { OptionalAuthParams } from './apiTypes.ts' -import { Transloadit } from './Transloadit.ts' - -type UrlParamPrimitive = string | number | boolean -type UrlParamArray = UrlParamPrimitive[] -type NormalizedUrlParams = Record - -interface RunSigOptions { - providedInput?: string - algorithm?: string -} - -interface RunSmartSigOptions { - providedInput?: string -} - -const smartCdnParamsSchema = z - .object({ - workspace: z.string().min(1, 'workspace is required'), - template: z.string().min(1, 'template is required'), - input: z.union([z.string(), z.number(), z.boolean()]), - url_params: z.record(z.unknown()).optional(), - expire_at_ms: z.union([z.number(), z.string()]).optional(), - }) - .passthrough() - -const cliSignatureParamsSchema = assemblyInstructionsSchema - .extend({ auth: assemblyAuthInstructionsSchema.partial().optional() }) - .partial() - .passthrough() - -export async function readStdin(): Promise { - if (process.stdin.isTTY) return '' - - process.stdin.setEncoding('utf8') - let data = '' - - for await (const chunk of process.stdin) { - data += chunk - } - - return data -} - -function fail(message: string): void { - console.error(message) - process.exitCode = 1 -} - -function formatIssues(issues: ZodIssue[]): string { - return issues - .map((issue) => { - const path = issue.path.join('.') || '(root)' - return `${path}: ${issue.message}` - }) - .join('; ') -} - -function normalizeUrlParam(value: unknown): UrlParamPrimitive | UrlParamArray | undefined { - if (value == null) return undefined - if (Array.isArray(value)) { - const normalized = value.filter( - (item): item is UrlParamPrimitive => - typeof item === 'string' || typeof item === 'number' || typeof item === 'boolean', - ) - return normalized.length > 0 ? 
normalized : undefined - } - if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { - return value - } - return undefined -} - -function normalizeUrlParams(params?: Record): NormalizedUrlParams | undefined { - if (params == null) return undefined - let normalized: NormalizedUrlParams | undefined - for (const [key, value] of Object.entries(params)) { - const normalizedValue = normalizeUrlParam(value) - if (normalizedValue === undefined) continue - if (normalized == null) normalized = {} - normalized[key] = normalizedValue - } - return normalized -} - -function ensureCredentials(): { authKey: string; authSecret: string } | null { - const authKey = process.env.TRANSLOADIT_KEY || process.env.TRANSLOADIT_AUTH_KEY - const authSecret = process.env.TRANSLOADIT_SECRET || process.env.TRANSLOADIT_AUTH_SECRET - - if (!authKey || !authSecret) { - fail( - 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', - ) - return null - } - - return { authKey, authSecret } -} - -export async function runSig(options: RunSigOptions = {}): Promise { - const credentials = ensureCredentials() - if (credentials == null) return - const { authKey, authSecret } = credentials - const { providedInput, algorithm } = options - - const rawInput = providedInput ?? (await readStdin()) - const input = rawInput.trim() - let params: Record - - if (input === '') { - params = { auth: { key: authKey } } - } else { - let parsed: unknown - try { - parsed = JSON.parse(input) - } catch (error) { - fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return - } - - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - fail('Invalid params provided via stdin. 
Expected a JSON object.') - return - } - - const parsedResult = cliSignatureParamsSchema.safeParse(parsed) - if (!parsedResult.success) { - fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return - } - - const parsedParams = parsedResult.data as Record - const existingAuth = - typeof parsedParams.auth === 'object' && - parsedParams.auth != null && - !Array.isArray(parsedParams.auth) - ? (parsedParams.auth as Record) - : {} - - params = { - ...parsedParams, - auth: { - ...existingAuth, - key: authKey, - }, - } - } - - const client = new Transloadit({ authKey, authSecret }) - try { - const signature = client.calcSignature(params as OptionalAuthParams, algorithm) - process.stdout.write(`${JSON.stringify(signature)}\n`) - } catch (error) { - fail(`Failed to generate signature: ${(error as Error).message}`) - } -} - -export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { - const credentials = ensureCredentials() - if (credentials == null) return - const { authKey, authSecret } = credentials - - const rawInput = options.providedInput ?? (await readStdin()) - const input = rawInput.trim() - if (input === '') { - fail( - 'Missing params provided via stdin. Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', - ) - return - } - - let parsed: unknown - try { - parsed = JSON.parse(input) - } catch (error) { - fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return - } - - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - fail('Invalid params provided via stdin. 
Expected a JSON object.') - return - } - - const parsedResult = smartCdnParamsSchema.safeParse(parsed) - if (!parsedResult.success) { - fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return - } - - const { workspace, template, input: inputFieldRaw, url_params, expire_at_ms } = parsedResult.data - const urlParams = normalizeUrlParams(url_params as Record | undefined) - - let expiresAt: number | undefined - if (typeof expire_at_ms === 'string') { - const parsedNumber = Number.parseInt(expire_at_ms, 10) - if (Number.isNaN(parsedNumber)) { - fail('Invalid params: expire_at_ms must be a number.') - return - } - expiresAt = parsedNumber - } else { - expiresAt = expire_at_ms - } - - const inputField = typeof inputFieldRaw === 'string' ? inputFieldRaw : String(inputFieldRaw) - - const client = new Transloadit({ authKey, authSecret }) - try { - const signedUrl = client.getSignedSmartCDNUrl({ - workspace, - template, - input: inputField, - urlParams, - expiresAt, - }) - process.stdout.write(`${signedUrl}\n`) - } catch (error) { - fail(`Failed to generate Smart CDN URL: ${(error as Error).message}`) - } -} - -function parseSigArguments(args: string[]): { algorithm?: string } { - let algorithm: string | undefined - let index = 0 - while (index < args.length) { - const arg = args[index] - if (arg === '--algorithm' || arg === '-a') { - const next = args[index + 1] - if (next == null || next.startsWith('-')) { - throw new Error('Missing value for --algorithm option') - } - algorithm = next - index += 2 - continue - } - if (arg.startsWith('--algorithm=')) { - const [, value] = arg.split('=', 2) - if (value === undefined || value === '') { - throw new Error('Missing value for --algorithm option') - } - algorithm = value - index += 1 - continue - } - throw new Error(`Unknown option: ${arg}`) - } - - return { algorithm } -} - -export async function main(args = process.argv.slice(2)): Promise { - const [command, ...commandArgs] = args - - switch (command) { - 
case 'smart_sig': { - await runSmartSig() - break - } - - case 'sig': { - try { - const { algorithm } = parseSigArguments(commandArgs) - await runSig({ algorithm }) - } catch (error) { - fail((error as Error).message) - } - break - } - - case '-h': - case '--help': - case undefined: { - process.stdout.write( - [ - 'Usage:', - ' npx transloadit smart_sig Read Smart CDN params JSON from stdin and output a signed URL.', - ' npx transloadit sig [--algorithm ] Read params JSON from stdin and output signed payload JSON.', - '', - 'Required JSON fields:', - ' smart_sig: workspace, template, input', - ' sig: none (object is optional)', - 'Optional JSON fields:', - ' smart_sig: expire_at_ms, url_params', - ' sig: auth.expires and any supported assembly params', - '', - 'Environment variables:', - ' TRANSLOADIT_KEY, TRANSLOADIT_SECRET', - ].join('\n'), - ) - if (command === undefined) process.exitCode = 1 - break - } - - default: { - fail(`Unknown command: ${command}`) - } - } -} +import 'dotenv/config' +import { createCli } from './cli/commands/index.ts' const currentFile = realpathSync(fileURLToPath(import.meta.url)) @@ -326,11 +24,20 @@ export function shouldRunCli(invoked?: string): boolean { return resolved === currentFile } +export async function main(args = process.argv.slice(2)): Promise { + const cli = createCli() + const exitCode = await cli.run(args) + if (exitCode !== 0) { + process.exitCode = exitCode + } +} + export function runCliWhenExecuted(): void { if (!shouldRunCli(process.argv[1])) return void main().catch((error) => { - fail((error as Error).message) + console.error((error as Error).message) + process.exitCode = 1 }) } diff --git a/src/cli/JobsPromise.ts b/src/cli/JobsPromise.ts new file mode 100644 index 00000000..941db468 --- /dev/null +++ b/src/cli/JobsPromise.ts @@ -0,0 +1,27 @@ +import { EventEmitter } from 'node:events' + +export default class JobsPromise extends EventEmitter { + private promises: Set> + + constructor() { + super() + this.promises 
= new Set() + } + + add(promise: Promise): void { + this.promises.add(promise) + promise + .finally(() => this.promises.delete(promise)) + .catch((err: unknown) => { + this.emit('error', err) + }) + } + + promise(): Promise { + const promises: Promise[] = [] + for (const promise of this.promises) { + promises.push(promise) + } + return Promise.all(promises) + } +} diff --git a/src/cli/OutputCtl.ts b/src/cli/OutputCtl.ts new file mode 100644 index 00000000..5ff9ef19 --- /dev/null +++ b/src/cli/OutputCtl.ts @@ -0,0 +1,56 @@ +export interface OutputCtlOptions { + logLevel?: number + jsonMode?: boolean +} + +/** Interface for output controllers (used to allow test mocks) */ +export interface IOutputCtl { + error(msg: unknown): void + warn(msg: unknown): void + info(msg: unknown): void + debug(msg: unknown): void + print(simple: unknown, json: unknown): void +} + +export default class OutputCtl implements IOutputCtl { + private json: boolean + private logLevel: number + + constructor({ logLevel = 0, jsonMode = false }: OutputCtlOptions = {}) { + this.json = jsonMode + this.logLevel = logLevel + + process.stdout.on('error', (err: NodeJS.ErrnoException) => { + if (err.code === 'EPIPE') { + process.exitCode = 0 + } + }) + process.stderr.on('error', (err: NodeJS.ErrnoException) => { + if (err.code === 'EPIPE') { + process.exitCode = 0 + } + }) + } + + error(msg: unknown): void { + console.error('ERROR ', msg) + } + + warn(msg: unknown): void { + if (this.logLevel > 0) console.error('WARNING', msg) + } + + info(msg: unknown): void { + if (this.logLevel > 0) console.error('INFO ', msg) + } + + debug(msg: unknown): void { + if (this.logLevel > 1) console.error('DEBUG ', msg) + } + + print(simple: unknown, json: unknown): void { + if (this.json) console.log(JSON.stringify(json)) + else if (typeof simple === 'string') console.log(simple) + else console.dir(simple, { depth: null }) + } +} diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts new file mode 
100644 index 00000000..87a6a5d8 --- /dev/null +++ b/src/cli/assemblies-create.ts @@ -0,0 +1,746 @@ +import EventEmitter from 'node:events' +import fs from 'node:fs' +import fsp from 'node:fs/promises' +import http from 'node:http' +import https from 'node:https' +import path from 'node:path' +import process from 'node:process' +import type { Readable, Writable } from 'node:stream' +import tty from 'node:tty' +import { promisify } from 'node:util' +import type { CreateAssemblyParams } from '../apiTypes.ts' +import type { AssemblyStatus, CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' +import JobsPromise from './JobsPromise.ts' +import type { IOutputCtl } from './OutputCtl.ts' +import { isErrnoException } from './types.ts' + +interface NodeWatcher { + on(event: 'error', listener: (err: Error) => void): void + on(event: 'close', listener: () => void): void + on(event: 'change', listener: (evt: string, filename: string) => void): void + on(event: string, listener: (...args: unknown[]) => void): void +} + +type NodeWatchFn = (path: string, options?: { recursive?: boolean }) => NodeWatcher + +let nodeWatch: NodeWatchFn | undefined + +async function getNodeWatch(): Promise { + if (!nodeWatch) { + const mod = (await import('node-watch')) as unknown as { default: NodeWatchFn } + nodeWatch = mod.default + } + return nodeWatch +} + +// workaround for determining mime-type of stdin +const stdinWithPath = process.stdin as unknown as { path: string } +stdinWithPath.path = '/dev/stdin' + +interface OutStream extends Writable { + path?: string + mtime?: Date +} + +interface Job { + in: Readable | null + out: OutStream | null +} + +type OutstreamProvider = (inpath: string | null, indir?: string) => Promise + +interface StreamRegistry { + [key: string]: OutStream | undefined +} + +interface JobEmitterOptions { + recursive?: boolean + outstreamProvider: OutstreamProvider + streamRegistry: StreamRegistry + watch?: boolean + reprocessStale?: boolean +} + +interface 
ReaddirJobEmitterOptions { + dir: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider + topdir?: string +} + +interface SingleJobEmitterOptions { + file: string + streamRegistry: StreamRegistry + outstreamProvider: OutstreamProvider +} + +interface WatchJobEmitterOptions { + file: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider +} + +interface StatLike { + isDirectory(): boolean +} + +const fstatAsync = promisify(fs.fstat) + +async function myStat( + stdioStream: NodeJS.ReadStream | NodeJS.WriteStream, + filepath: string, +): Promise { + if (filepath === '-') { + const stream = stdioStream as NodeJS.ReadStream & { fd: number } + return await fstatAsync(stream.fd) + } + return await fsp.stat(filepath) +} + +async function ensureDir(dir: string): Promise { + try { + await fsp.mkdir(dir) + } catch (err) { + if (!isErrnoException(err)) throw err + if (err.code === 'EEXIST') { + const stats = await fsp.stat(dir) + if (!stats.isDirectory()) throw err + return + } + if (err.code !== 'ENOENT') throw err + + await ensureDir(path.dirname(dir)) + await fsp.mkdir(dir) + } +} + +function dirProvider(output: string): OutstreamProvider { + return async (inpath, indir = process.cwd()) => { + if (inpath == null || inpath === '-') { + throw new Error('You must provide an input to output to a directory') + } + + let relpath = path.relative(indir, inpath) + relpath = relpath.replace(/^(\.\.\/)+/, '') + const outpath = path.join(output, relpath) + const outdir = path.dirname(outpath) + + await ensureDir(outdir) + let mtime: Date + try { + const stats = await fsp.stat(outpath) + mtime = stats.mtime + } catch (_err) { + mtime = new Date(0) + } + const outstream = fs.createWriteStream(outpath) as OutStream + outstream.mtime = mtime + return outstream + } +} + +function fileProvider(output: string): OutstreamProvider { + const dirExistsP = ensureDir(path.dirname(output)) + return async 
(_inpath) => { + await dirExistsP + if (output === '-') return process.stdout as OutStream + + let mtime: Date + try { + const stats = await fsp.stat(output) + mtime = stats.mtime + } catch (_err) { + mtime = new Date(0) + } + const outstream = fs.createWriteStream(output) as OutStream + outstream.mtime = mtime + return outstream + } +} + +function nullProvider(): OutstreamProvider { + return async (_inpath) => null +} + +class MyEventEmitter extends EventEmitter { + protected hasEnded: boolean + + constructor() { + super() + this.hasEnded = false + } + + override emit(event: string | symbol, ...args: unknown[]): boolean { + if (this.hasEnded) return false + if (event === 'end' || event === 'error') { + this.hasEnded = true + return super.emit(event, ...args) + } + return super.emit(event, ...args) + } +} + +class ReaddirJobEmitter extends MyEventEmitter { + constructor({ + dir, + streamRegistry, + recursive, + outstreamProvider, + topdir = dir, + }: ReaddirJobEmitterOptions) { + super() + + process.nextTick(() => { + this.processDirectory({ dir, streamRegistry, recursive, outstreamProvider, topdir }).catch( + (err) => { + this.emit('error', err) + }, + ) + }) + } + + private async processDirectory({ + dir, + streamRegistry, + recursive, + outstreamProvider, + topdir, + }: ReaddirJobEmitterOptions & { topdir: string }): Promise { + const files = await fsp.readdir(dir) + + const pendingOperations: Promise[] = [] + + for (const filename of files) { + const file = path.normalize(path.join(dir, filename)) + pendingOperations.push( + this.processFile({ file, streamRegistry, recursive, outstreamProvider, topdir }), + ) + } + + await Promise.all(pendingOperations) + this.emit('end') + } + + private async processFile({ + file, + streamRegistry, + recursive = false, + outstreamProvider, + topdir, + }: { + file: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider + topdir: string + }): Promise { + const stats = await 
fsp.stat(file) + + if (stats.isDirectory()) { + if (recursive) { + await new Promise((resolve, reject) => { + const subdirEmitter = new ReaddirJobEmitter({ + dir: file, + streamRegistry, + recursive, + outstreamProvider, + topdir, + }) + subdirEmitter.on('job', (job: Job) => this.emit('job', job)) + subdirEmitter.on('error', (error: Error) => reject(error)) + subdirEmitter.on('end', () => resolve()) + }) + } + } else { + const existing = streamRegistry[file] + if (existing) existing.end() + const outstream = await outstreamProvider(file, topdir) + streamRegistry[file] = outstream ?? undefined + this.emit('job', { in: fs.createReadStream(file), out: outstream }) + } + } +} + +class SingleJobEmitter extends MyEventEmitter { + constructor({ file, streamRegistry, outstreamProvider }: SingleJobEmitterOptions) { + super() + + const normalizedFile = path.normalize(file) + const existing = streamRegistry[normalizedFile] + if (existing) existing.end() + outstreamProvider(normalizedFile).then((outstream) => { + streamRegistry[normalizedFile] = outstream ?? 
undefined + + let instream: Readable | null + if (normalizedFile === '-') { + if (tty.isatty(process.stdin.fd)) { + instream = null + } else { + instream = process.stdin + } + } else { + instream = fs.createReadStream(normalizedFile) + } + + process.nextTick(() => { + this.emit('job', { in: instream, out: outstream }) + this.emit('end') + }) + }) + } +} + +class InputlessJobEmitter extends MyEventEmitter { + constructor({ + outstreamProvider, + }: { streamRegistry: StreamRegistry; outstreamProvider: OutstreamProvider }) { + super() + + process.nextTick(() => { + outstreamProvider(null).then((outstream) => { + try { + this.emit('job', { in: null, out: outstream }) + } catch (err) { + this.emit('error', err) + } + + this.emit('end') + }) + }) + } +} + +class NullJobEmitter extends MyEventEmitter { + constructor() { + super() + process.nextTick(() => this.emit('end')) + } +} + +class WatchJobEmitter extends MyEventEmitter { + constructor({ file, streamRegistry, recursive, outstreamProvider }: WatchJobEmitterOptions) { + super() + + this.init({ file, streamRegistry, recursive, outstreamProvider }).catch((err) => { + this.emit('error', err) + }) + } + + private async init({ + file, + streamRegistry, + recursive, + outstreamProvider, + }: WatchJobEmitterOptions): Promise { + const stats = await fsp.stat(file) + const topdir = stats.isDirectory() ? 
file : undefined + + const watchFn = await getNodeWatch() + const watcher = watchFn(file, { recursive }) + + watcher.on('error', (err: Error) => this.emit('error', err)) + watcher.on('close', () => this.emit('end')) + watcher.on('change', (_evt: string, filename: string) => { + const normalizedFile = path.normalize(filename) + this.handleChange(normalizedFile, topdir, streamRegistry, outstreamProvider).catch((err) => { + this.emit('error', err) + }) + }) + } + + private async handleChange( + normalizedFile: string, + topdir: string | undefined, + streamRegistry: StreamRegistry, + outstreamProvider: OutstreamProvider, + ): Promise { + const stats = await fsp.stat(normalizedFile) + if (stats.isDirectory()) return + + const existing = streamRegistry[normalizedFile] + if (existing) existing.end() + + const outstream = await outstreamProvider(normalizedFile, topdir) + streamRegistry[normalizedFile] = outstream ?? undefined + + const instream = fs.createReadStream(normalizedFile) + this.emit('job', { in: instream, out: outstream }) + } +} + +class MergedJobEmitter extends MyEventEmitter { + constructor(...jobEmitters: MyEventEmitter[]) { + super() + + let ncomplete = 0 + + for (const jobEmitter of jobEmitters) { + jobEmitter.on('error', (err: Error) => this.emit('error', err)) + jobEmitter.on('job', (job: Job) => this.emit('job', job)) + jobEmitter.on('end', () => { + if (++ncomplete === jobEmitters.length) this.emit('end') + }) + } + + if (jobEmitters.length === 0) { + this.emit('end') + } + } +} + +class ConcattedJobEmitter extends MyEventEmitter { + constructor(emitterFn: () => MyEventEmitter, ...emitterFns: (() => MyEventEmitter)[]) { + super() + + const emitter = emitterFn() + + emitter.on('error', (err: Error) => this.emit('error', err)) + emitter.on('job', (job: Job) => this.emit('job', job)) + + if (emitterFns.length === 0) { + emitter.on('end', () => this.emit('end')) + } else { + emitter.on('end', () => { + const firstFn = emitterFns[0] + if (!firstFn) { + 
this.emit('end') + return + } + const restEmitter = new ConcattedJobEmitter(firstFn, ...emitterFns.slice(1)) + restEmitter.on('error', (err: Error) => this.emit('error', err)) + restEmitter.on('job', (job: Job) => this.emit('job', job)) + restEmitter.on('end', () => this.emit('end')) + }) + } + } +} + +function detectConflicts(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + const outfileAssociations: Record = {} + + jobEmitter.on('end', () => emitter.emit('end')) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => { + if (job.in == null || job.out == null) { + emitter.emit('job', job) + return + } + const inPath = (job.in as fs.ReadStream).path as string + const outPath = job.out.path as string + if (Object.hasOwn(outfileAssociations, outPath) && outfileAssociations[outPath] !== inPath) { + emitter.emit( + 'error', + new Error(`Output collision between '${inPath}' and '${outfileAssociations[outPath]}'`), + ) + } else { + outfileAssociations[outPath] = inPath + emitter.emit('job', job) + } + }) + + return emitter +} + +function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + + const jobsPromise = new JobsPromise() + + jobEmitter.on('end', () => jobsPromise.promise().then(() => emitter.emit('end'))) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => { + if (job.in == null || job.out == null) { + emitter.emit('job', job) + return + } + + const inPath = (job.in as fs.ReadStream).path as string + jobsPromise.add( + fsp + .stat(inPath) + .then((stats) => { + const inM = stats.mtime + const outM = job.out?.mtime ?? 
new Date(0) + + if (outM <= inM) emitter.emit('job', job) + }) + .catch(() => { + emitter.emit('job', job) + }), + ) + }) + + return emitter +} + +function makeJobEmitter( + inputs: string[], + { + recursive, + outstreamProvider, + streamRegistry, + watch: watchOption, + reprocessStale, + }: JobEmitterOptions, +): MyEventEmitter { + const emitter = new EventEmitter() + + const emitterFns: (() => MyEventEmitter)[] = [] + const watcherFns: (() => MyEventEmitter)[] = [] + + async function processInputs(): Promise { + for (const input of inputs) { + if (input === '-') { + emitterFns.push( + () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), + ) + watcherFns.push(() => new NullJobEmitter()) + } else { + const stats = await fsp.stat(input) + if (stats.isDirectory()) { + emitterFns.push( + () => + new ReaddirJobEmitter({ dir: input, recursive, outstreamProvider, streamRegistry }), + ) + watcherFns.push( + () => + new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + ) + } else { + emitterFns.push( + () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), + ) + watcherFns.push( + () => + new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + ) + } + } + } + + if (inputs.length === 0) { + emitterFns.push(() => new InputlessJobEmitter({ outstreamProvider, streamRegistry })) + } + + startEmitting() + } + + function startEmitting(): void { + let source: MyEventEmitter = new MergedJobEmitter(...emitterFns.map((f) => f())) + + if (watchOption) { + source = new ConcattedJobEmitter( + () => source, + () => new MergedJobEmitter(...watcherFns.map((f) => f())), + ) + } + + source.on('job', (job: Job) => emitter.emit('job', job)) + source.on('error', (err: Error) => emitter.emit('error', err)) + source.on('end', () => emitter.emit('end')) + } + + processInputs().catch((err) => { + emitter.emit('error', err) + }) + + const stalefilter = reprocessStale ? 
(x: EventEmitter) => x as MyEventEmitter : dismissStaleJobs + return stalefilter(detectConflicts(emitter)) +} + +export interface AssembliesCreateOptions { + steps?: string + template?: string + fields?: Record + watch?: boolean + recursive?: boolean + inputs: string[] + output?: string | null + del?: boolean + reprocessStale?: boolean +} + +export default async function run( + outputctl: IOutputCtl, + client: Transloadit, + { + steps, + template, + fields, + watch: watchOption, + recursive, + inputs, + output, + del, + reprocessStale, + }: AssembliesCreateOptions, +): Promise { + // Quick fix for https://github.com/transloadit/transloadify/issues/13 + // Only default to stdout when output is undefined (not provided), not when explicitly null + let resolvedOutput = output + if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' + + // Read steps file async before entering the Promise constructor + let stepsData: CreateAssemblyParams['steps'] | undefined + if (steps) { + const stepsContent = await fsp.readFile(steps, 'utf8') + stepsData = JSON.parse(stepsContent) as CreateAssemblyParams['steps'] + } + + // Determine output stat async before entering the Promise constructor + let outstat: StatLike | undefined + if (resolvedOutput != null) { + try { + outstat = await myStat(process.stdout, resolvedOutput) + } catch (e) { + if (!isErrnoException(e)) throw e + if (e.code !== 'ENOENT') throw e + outstat = { isDirectory: () => false } + } + + if (!outstat.isDirectory() && inputs.length !== 0) { + const firstInput = inputs[0] + if (firstInput) { + const firstInputStat = await myStat(process.stdin, firstInput) + if (inputs.length > 1 || firstInputStat.isDirectory()) { + const msg = 'Output must be a directory when specifying multiple inputs' + outputctl.error(msg) + throw new Error(msg) + } + } + } + } + + return new Promise((resolve, reject) => { + const params: CreateAssemblyParams = ( + stepsData ? 
{ steps: stepsData } : { template_id: template } + ) as CreateAssemblyParams + if (fields) { + params.fields = fields + } + + const outstreamProvider: OutstreamProvider = + resolvedOutput == null + ? nullProvider() + : outstat?.isDirectory() + ? dirProvider(resolvedOutput) + : fileProvider(resolvedOutput) + const streamRegistry: StreamRegistry = {} + + const emitter = makeJobEmitter(inputs, { + recursive, + watch: watchOption, + outstreamProvider, + streamRegistry, + reprocessStale, + }) + + const jobsPromise = new JobsPromise() + emitter.on('job', (job: Job) => { + const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined + const outPath = job.out?.path + outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + let superceded = false + if (job.out != null) + job.out.on('finish', () => { + superceded = true + }) + + const createOptions: CreateAssemblyOptions = { params } + if (job.in != null) { + createOptions.uploads = { in: job.in } + } + + const jobPromise = (async () => { + const result = await client.createAssembly(createOptions) + if (superceded) return + + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + let assembly: AssemblyStatus = await client.getAssembly(assemblyId) + + while ( + assembly.ok !== 'ASSEMBLY_COMPLETED' && + assembly.ok !== 'ASSEMBLY_CANCELED' && + !assembly.error + ) { + if (superceded) return + outputctl.debug(`Assembly status: ${assembly.ok}`) + await new Promise((resolve) => setTimeout(resolve, 1000)) + assembly = await client.getAssembly(assemblyId) + } + + if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + if (!assembly.results) throw new Error('No results in assembly') + const resultsKeys = Object.keys(assembly.results) + const firstKey = resultsKeys[0] + 
if (!firstKey) throw new Error('No results in assembly') + const firstResult = assembly.results[firstKey] + if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') + const resulturl = firstResult[0].url + + if (job.out != null && resulturl) { + outputctl.debug('DOWNLOADING') + await new Promise((resolve, reject) => { + const get = resulturl.startsWith('https') ? https.get : http.get + get(resulturl, (res) => { + if (res.statusCode !== 200) { + const msg = `Server returned http status ${res.statusCode}` + outputctl.error(msg) + return reject(new Error(msg)) + } + + if (superceded) return resolve() + + if (!job.out) { + return reject(new Error('Job output stream is undefined')) + } + res.pipe(job.out) + job.out.on('finish', () => res.unpipe()) + res.on('end', () => resolve()) + }).on('error', (err) => { + outputctl.error(err.message) + reject(err) + }) + }) + } + await completeJob() + })() + + jobsPromise.add(jobPromise) + + async function completeJob(): Promise { + const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined + const outPath = job.out?.path + outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 
'null'}`) + + if (del && job.in != null && inPath) { + await fsp.unlink(inPath) + } + } + }) + + jobsPromise.on('error', (err: Error) => { + outputctl.error(err) + }) + + emitter.on('error', (err: Error) => { + outputctl.error(err) + reject(err) + }) + + emitter.on('end', () => { + resolve(jobsPromise.promise()) + }) + }) +} diff --git a/src/cli/assemblies.ts b/src/cli/assemblies.ts new file mode 100644 index 00000000..a6d627d8 --- /dev/null +++ b/src/cli/assemblies.ts @@ -0,0 +1,151 @@ +import { z } from 'zod' +import { tryCatch } from '../alphalib/tryCatch.ts' +import type { Transloadit } from '../Transloadit.ts' +import assembliesCreate from './assemblies-create.ts' +import { createReadStream, formatAPIError, stream2buf } from './helpers.ts' +import type { IOutputCtl } from './OutputCtl.ts' +import { ensureError } from './types.ts' + +export const create = assembliesCreate + +export interface AssemblyListOptions { + before?: string + after?: string + fields?: string[] + keywords?: string[] + pagesize?: number +} + +export interface AssemblyGetOptions { + assemblies: string[] +} + +export interface AssemblyDeleteOptions { + assemblies: string[] +} + +export interface AssemblyReplayOptions { + fields?: Record + reparse?: boolean + steps?: string + notify_url?: string + assemblies: string[] +} + +const AssemblySchema = z.object({ + id: z.string(), +}) + +export function list( + output: IOutputCtl, + client: Transloadit, + { before, after, fields, keywords }: AssemblyListOptions, +): Promise { + const assemblies = client.streamAssemblies({ + fromdate: after, + todate: before, + keywords, + }) + + assemblies.on('readable', () => { + const assembly: unknown = assemblies.read() + if (assembly == null) return + + const parsed = AssemblySchema.safeParse(assembly) + if (!parsed.success) return + + if (fields == null) { + output.print(parsed.data.id, assembly) + } else { + const assemblyRecord = assembly as Record + output.print(fields.map((field) => 
assemblyRecord[field]).join(' '), assembly) + } + }) + + return new Promise((resolve) => { + assemblies.on('end', resolve) + assemblies.on('error', (err: unknown) => { + output.error(formatAPIError(err)) + resolve() + }) + }) +} + +export async function get( + output: IOutputCtl, + client: Transloadit, + { assemblies }: AssemblyGetOptions, +): Promise { + for (const assembly of assemblies) { + await new Promise((resolve) => setTimeout(resolve, 1000)) + const [err, result] = await tryCatch(client.getAssembly(assembly)) + if (err) { + output.error(formatAPIError(err)) + throw ensureError(err) + } + output.print(result, result) + } +} + +async function _delete( + output: IOutputCtl, + client: Transloadit, + { assemblies }: AssemblyDeleteOptions, +): Promise { + const promises = assemblies.map(async (assembly) => { + const [err] = await tryCatch(client.cancelAssembly(assembly)) + if (err) { + output.error(formatAPIError(err)) + } + }) + await Promise.all(promises) +} + +export { _delete as delete } + +const StepsSchema = z.record(z.string(), z.unknown()) + +export async function replay( + output: IOutputCtl, + client: Transloadit, + { fields, reparse, steps, notify_url, assemblies }: AssemblyReplayOptions, +): Promise { + if (steps) { + try { + const buf = await new Promise((resolve, reject) => { + stream2buf(createReadStream(steps), (err, buf) => { + if (err) reject(err) + else if (buf) resolve(buf) + else reject(new Error('No buffer received')) + }) + }) + const parsed: unknown = JSON.parse(buf.toString()) + const validated = StepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error('Invalid steps format') + } + await apiCall(validated.data) + } catch (err) { + const error = ensureError(err) + output.error(error.message) + } + } else { + await apiCall() + } + + async function apiCall(_steps?: Record): Promise { + const promises = assemblies.map(async (assembly) => { + const [err] = await tryCatch( + client.replayAssembly(assembly, { + 
reparse_template: reparse ? 1 : 0, + fields, + notify_url, + }), + ) + if (err) { + output.error(formatAPIError(err)) + } + }) + await Promise.all(promises) + } +} diff --git a/src/cli/bills.ts b/src/cli/bills.ts new file mode 100644 index 00000000..5a0a0a88 --- /dev/null +++ b/src/cli/bills.ts @@ -0,0 +1,36 @@ +import { z } from 'zod' +import { tryCatch } from '../alphalib/tryCatch.ts' +import type { Transloadit } from '../Transloadit.ts' +import { formatAPIError } from './helpers.ts' +import type { IOutputCtl } from './OutputCtl.ts' + +export interface BillsGetOptions { + months: string[] +} + +const BillResponseSchema = z.object({ + total: z.number(), +}) + +export async function get( + output: IOutputCtl, + client: Transloadit, + { months }: BillsGetOptions, +): Promise { + const requests = months.map((month) => client.getBill(month)) + + const [err, results] = await tryCatch(Promise.all(requests)) + if (err) { + output.error(formatAPIError(err)) + return + } + + for (const result of results) { + const parsed = BillResponseSchema.safeParse(result) + if (parsed.success) { + output.print(`$${parsed.data.total}`, result) + } else { + output.print('Unable to parse bill response', result) + } + } +} diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts new file mode 100644 index 00000000..0d1718fe --- /dev/null +++ b/src/cli/commands/BaseCommand.ts @@ -0,0 +1,73 @@ +import process from 'node:process' +import { Command, Option } from 'clipanion' +import 'dotenv/config' +import { Transloadit as TransloaditClient } from '../../Transloadit.ts' +import OutputCtl, { type IOutputCtl } from '../OutputCtl.ts' + +export abstract class BaseCommand extends Command { + verbose = Option.Boolean('-v,--verbose', false, { + description: 'Enable debug output', + }) + + quiet = Option.Boolean('-q,--quiet', false, { + description: 'Disable warnings', + }) + + json = Option.Boolean('-j,--json', false, { + description: 'Output in JSON format', + }) + + protected 
output!: IOutputCtl + protected client!: TransloaditClient + + protected get logLevel(): number { + if (this.verbose) return 2 + if (this.quiet) return 0 + return 1 + } + + protected setupOutput(): void { + this.output = new OutputCtl({ + logLevel: this.logLevel, + jsonMode: this.json, + }) + } + + protected setupClient(): boolean { + if (!process.env.TRANSLOADIT_KEY || !process.env.TRANSLOADIT_SECRET) { + this.output.error( + 'Please provide API authentication in the environment variables TRANSLOADIT_KEY and TRANSLOADIT_SECRET', + ) + return false + } + + this.client = new TransloaditClient({ + authKey: process.env.TRANSLOADIT_KEY, + authSecret: process.env.TRANSLOADIT_SECRET, + }) + return true + } + + abstract override execute(): Promise +} + +export abstract class AuthenticatedCommand extends BaseCommand { + override async execute(): Promise { + this.setupOutput() + if (!this.setupClient()) { + return 1 + } + return await this.run() + } + + protected abstract run(): Promise +} + +export abstract class UnauthenticatedCommand extends BaseCommand { + override async execute(): Promise { + this.setupOutput() + return await this.run() + } + + protected abstract run(): Promise +} diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts new file mode 100644 index 00000000..7989b18a --- /dev/null +++ b/src/cli/commands/assemblies.ts @@ -0,0 +1,279 @@ +import process from 'node:process' +import { Command, Option } from 'clipanion' +import * as assemblies from '../assemblies.ts' +import assembliesCreate from '../assemblies-create.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +export class AssembliesCreateCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'create'], + ['assembly', 'create'], + ['a', 'create'], + ['a', 'c'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Create assemblies to process media', + details: ` + Create assemblies to process media files 
using Transloadit. + You must specify either --steps or --template. + `, + examples: [ + [ + 'Process a file with steps', + 'transloadit assemblies create --steps steps.json -i input.jpg -o output.jpg', + ], + [ + 'Process with a template', + 'transloadit assemblies create --template TEMPLATE_ID -i input.jpg -o output/', + ], + [ + 'Watch for changes', + 'transloadit assemblies create --steps steps.json -i input/ -o output/ --watch', + ], + ], + }) + + steps = Option.String('--steps,-s', { + description: 'Specify assembly instructions with a JSON file', + }) + + template = Option.String('--template,-t', { + description: 'Specify a template to use for these assemblies', + }) + + inputs = Option.Array('--input,-i', { + description: 'Provide an input file or a directory', + }) + + outputPath = Option.String('--output,-o', { + description: 'Specify an output file or directory', + }) + + fields = Option.Array('--field,-f', { + description: 'Set a template field (KEY=VAL)', + }) + + watch = Option.Boolean('--watch,-w', false, { + description: 'Watch inputs for changes', + }) + + recursive = Option.Boolean('--recursive,-r', false, { + description: 'Enumerate input directories recursively', + }) + + deleteAfterProcessing = Option.Boolean('--delete-after-processing,-d', false, { + description: 'Delete input files after they are processed', + }) + + reprocessStale = Option.Boolean('--reprocess-stale', false, { + description: 'Process inputs even if output is newer', + }) + + protected async run(): Promise { + if (!this.steps && !this.template) { + this.output.error('assemblies create requires exactly one of either --steps or --template') + return 1 + } + if (this.steps && this.template) { + this.output.error('assemblies create requires exactly one of either --steps or --template') + return 1 + } + + const inputList = this.inputs ?? 
[] + if (inputList.length === 0 && this.watch) { + this.output.error('assemblies create --watch requires at least one input') + return 1 + } + + // Default to stdin if no inputs and not a TTY + if (inputList.length === 0 && !process.stdin.isTTY) { + inputList.push('-') + } + + const fieldsMap: Record = {} + for (const field of this.fields ?? []) { + const eqIndex = field.indexOf('=') + if (eqIndex === -1) { + this.output.error(`invalid argument for --field: '${field}'`) + return 1 + } + const key = field.slice(0, eqIndex) + const value = field.slice(eqIndex + 1) + fieldsMap[key] = value + } + + await assembliesCreate(this.output, this.client, { + steps: this.steps, + template: this.template, + fields: fieldsMap, + watch: this.watch, + recursive: this.recursive, + inputs: inputList, + output: this.outputPath ?? null, + del: this.deleteAfterProcessing, + reprocessStale: this.reprocessStale, + }) + return undefined + } +} + +export class AssembliesListCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'list'], + ['assembly', 'list'], + ['a', 'list'], + ['a', 'l'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'List assemblies matching given criteria', + examples: [ + ['List recent assemblies', 'transloadit assemblies list'], + ['List assemblies after a date', 'transloadit assemblies list --after 2024-01-01'], + ], + }) + + before = Option.String('--before,-b', { + description: 'Return only assemblies created before specified date', + }) + + after = Option.String('--after,-a', { + description: 'Return only assemblies created after specified date', + }) + + keywords = Option.String('--keywords', { + description: 'Comma-separated list of keywords to match assemblies', + }) + + fields = Option.String('--fields', { + description: 'Comma-separated list of fields to return for each assembly', + }) + + protected async run(): Promise { + const keywordList = this.keywords ? 
this.keywords.split(',') : undefined + const fieldList = this.fields ? this.fields.split(',') : undefined + + await assemblies.list(this.output, this.client, { + before: this.before, + after: this.after, + keywords: keywordList, + fields: fieldList, + }) + return undefined + } +} + +export class AssembliesGetCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'get'], + ['assembly', 'get'], + ['a', 'get'], + ['a', 'g'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Fetch assembly statuses', + examples: [['Get assembly status', 'transloadit assemblies get ASSEMBLY_ID']], + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await assemblies.get(this.output, this.client, { + assemblies: this.assemblyIds, + }) + return undefined + } +} + +export class AssembliesDeleteCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'delete'], + ['assembly', 'delete'], + ['a', 'delete'], + ['a', 'd'], + ['assemblies', 'cancel'], + ['assembly', 'cancel'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Cancel assemblies', + examples: [['Cancel an assembly', 'transloadit assemblies delete ASSEMBLY_ID']], + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await assemblies.delete(this.output, this.client, { + assemblies: this.assemblyIds, + }) + return undefined + } +} + +export class AssembliesReplayCommand extends AuthenticatedCommand { + static override paths = [ + ['assemblies', 'replay'], + ['assembly', 'replay'], + ['a', 'replay'], + ['a', 'r'], + ] + + static override usage = Command.Usage({ + category: 'Assemblies', + description: 'Replay assemblies', + examples: [ + ['Replay an assembly', 'transloadit assemblies replay ASSEMBLY_ID'], + [ + 'Replay with new notify URL', + 'transloadit assemblies replay --notify-url https://example.com/notify ASSEMBLY_ID', + 
], + ], + }) + + fields = Option.Array('--field,-f', { + description: 'Set a template field (KEY=VAL)', + }) + + steps = Option.String('--steps,-s', { + description: 'Override assembly instructions', + }) + + notifyUrl = Option.String('--notify-url', { + description: 'Specify a new URL for assembly notifications', + }) + + reparseTemplate = Option.Boolean('--reparse-template', false, { + description: 'Use the most up-to-date version of the template', + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + const fieldsMap: Record = {} + for (const field of this.fields ?? []) { + const eqIndex = field.indexOf('=') + if (eqIndex === -1) { + this.output.error(`invalid argument for --field: '${field}'`) + return 1 + } + const key = field.slice(0, eqIndex) + const value = field.slice(eqIndex + 1) + fieldsMap[key] = value + } + + await assemblies.replay(this.output, this.client, { + fields: fieldsMap, + reparse: this.reparseTemplate, + steps: this.steps, + notify_url: this.notifyUrl, + assemblies: this.assemblyIds, + }) + return undefined + } +} diff --git a/src/cli/commands/auth.ts b/src/cli/commands/auth.ts new file mode 100644 index 00000000..30401e09 --- /dev/null +++ b/src/cli/commands/auth.ts @@ -0,0 +1,443 @@ +import process from 'node:process' +import { Command, Option } from 'clipanion' +import { type ZodIssue, z } from 'zod' +import { + assemblyAuthInstructionsSchema, + assemblyInstructionsSchema, +} from '../../alphalib/types/template.ts' +import type { OptionalAuthParams } from '../../apiTypes.ts' +import { Transloadit } from '../../Transloadit.ts' +import { UnauthenticatedCommand } from './BaseCommand.ts' + +type UrlParamPrimitive = string | number | boolean +type UrlParamArray = UrlParamPrimitive[] +type NormalizedUrlParams = Record + +const smartCdnParamsSchema = z + .object({ + workspace: z.string().min(1, 'workspace is required'), + template: z.string().min(1, 'template is required'), + input: z.union([z.string(), 
z.number(), z.boolean()]), + url_params: z.record(z.unknown()).optional(), + expire_at_ms: z.union([z.number(), z.string()]).optional(), + }) + .passthrough() + +const cliSignatureParamsSchema = assemblyInstructionsSchema + .extend({ auth: assemblyAuthInstructionsSchema.partial().optional() }) + .partial() + .passthrough() + +function formatIssues(issues: ZodIssue[]): string { + return issues + .map((issue) => { + const path = issue.path.join('.') || '(root)' + return `${path}: ${issue.message}` + }) + .join('; ') +} + +function normalizeUrlParam(value: unknown): UrlParamPrimitive | UrlParamArray | undefined { + if (value == null) return undefined + if (Array.isArray(value)) { + const normalized = value.filter( + (item): item is UrlParamPrimitive => + typeof item === 'string' || typeof item === 'number' || typeof item === 'boolean', + ) + return normalized.length > 0 ? normalized : undefined + } + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return value + } + return undefined +} + +function normalizeUrlParams(params?: Record): NormalizedUrlParams | undefined { + if (params == null) return undefined + let normalized: NormalizedUrlParams | undefined + for (const [key, value] of Object.entries(params)) { + const normalizedValue = normalizeUrlParam(value) + if (normalizedValue === undefined) continue + if (normalized == null) normalized = {} + normalized[key] = normalizedValue + } + return normalized +} + +async function readStdin(): Promise { + if (process.stdin.isTTY) return '' + + process.stdin.setEncoding('utf8') + let data = '' + + for await (const chunk of process.stdin) { + data += chunk + } + + return data +} + +function ensureCredentials(): { authKey: string; authSecret: string } | null { + const authKey = process.env.TRANSLOADIT_KEY || process.env.TRANSLOADIT_AUTH_KEY + const authSecret = process.env.TRANSLOADIT_SECRET || process.env.TRANSLOADIT_AUTH_SECRET + + if (!authKey || !authSecret) { + return null + } 
+ + return { authKey, authSecret } +} + +// Testable helper functions exported for unit tests +export interface RunSigOptions { + providedInput?: string + algorithm?: string +} + +export interface RunSmartSigOptions { + providedInput?: string +} + +function fail(message: string): void { + console.error(message) + process.exitCode = 1 +} + +export async function runSig(options: RunSigOptions = {}): Promise { + const credentials = ensureCredentials() + if (credentials == null) { + fail( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + return + } + const { authKey, authSecret } = credentials + const { providedInput, algorithm } = options + + const rawInput = providedInput ?? (await readStdin()) + const input = rawInput.trim() + let params: Record + + if (input === '') { + params = { auth: { key: authKey } } + } else { + let parsed: unknown + try { + parsed = JSON.parse(input) + } catch (error) { + fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) + return + } + + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + fail('Invalid params provided via stdin. Expected a JSON object.') + return + } + + const parsedResult = cliSignatureParamsSchema.safeParse(parsed) + if (!parsedResult.success) { + fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) + return + } + + const parsedParams = parsedResult.data as Record + const existingAuth = + typeof parsedParams.auth === 'object' && + parsedParams.auth != null && + !Array.isArray(parsedParams.auth) + ? 
(parsedParams.auth as Record) + : {} + + params = { + ...parsedParams, + auth: { + ...existingAuth, + key: authKey, + }, + } + } + + const client = new Transloadit({ authKey, authSecret }) + try { + const signature = client.calcSignature(params as OptionalAuthParams, algorithm) + process.stdout.write(`${JSON.stringify(signature)}\n`) + } catch (error) { + fail(`Failed to generate signature: ${(error as Error).message}`) + } +} + +export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { + const credentials = ensureCredentials() + if (credentials == null) { + fail( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + return + } + const { authKey, authSecret } = credentials + + const rawInput = options.providedInput ?? (await readStdin()) + const input = rawInput.trim() + if (input === '') { + fail( + 'Missing params provided via stdin. Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', + ) + return + } + + let parsed: unknown + try { + parsed = JSON.parse(input) + } catch (error) { + fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) + return + } + + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + fail('Invalid params provided via stdin. 
Expected a JSON object.') + return + } + + const parsedResult = smartCdnParamsSchema.safeParse(parsed) + if (!parsedResult.success) { + fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) + return + } + + const { workspace, template, input: inputFieldRaw, url_params, expire_at_ms } = parsedResult.data + const urlParams = normalizeUrlParams(url_params as Record | undefined) + + let expiresAt: number | undefined + if (typeof expire_at_ms === 'string') { + const parsedNumber = Number.parseInt(expire_at_ms, 10) + if (Number.isNaN(parsedNumber)) { + fail('Invalid params: expire_at_ms must be a number.') + return + } + expiresAt = parsedNumber + } else { + expiresAt = expire_at_ms + } + + const inputField = typeof inputFieldRaw === 'string' ? inputFieldRaw : String(inputFieldRaw) + + const client = new Transloadit({ authKey, authSecret }) + try { + const signedUrl = client.getSignedSmartCDNUrl({ + workspace, + template, + input: inputField, + urlParams, + expiresAt, + }) + process.stdout.write(`${signedUrl}\n`) + } catch (error) { + fail(`Failed to generate Smart CDN URL: ${(error as Error).message}`) + } +} + +/** + * Generate a signature for assembly params + */ +export class SignatureCommand extends UnauthenticatedCommand { + static override paths = [ + ['auth', 'signature'], + ['auth', 'sig'], + ['signature'], + ['sig'], // BC alias + ] + + static override usage = Command.Usage({ + category: 'Auth', + description: 'Generate a signature for assembly params', + details: ` + Read params JSON from stdin and output signed payload JSON. + If no input is provided, generates a signature with default params. 
+ `, + examples: [ + ['Generate signature', 'echo \'{"steps":{}}\' | transloadit signature'], + ['With algorithm', 'echo \'{"steps":{}}\' | transloadit signature --algorithm sha384'], + ['Using alias', 'echo \'{"steps":{}}\' | transloadit sig'], + ], + }) + + algorithm = Option.String('--algorithm,-a', { + description: 'Signature algorithm to use (sha1, sha256, sha384, sha512)', + }) + + protected async run(): Promise { + const credentials = ensureCredentials() + if (credentials == null) { + this.output.error( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + return 1 + } + const { authKey, authSecret } = credentials + + const rawInput = await readStdin() + const input = rawInput.trim() + let params: Record + + if (input === '') { + params = { auth: { key: authKey } } + } else { + let parsed: unknown + try { + parsed = JSON.parse(input) + } catch (error) { + this.output.error(`Failed to parse JSON from stdin: ${(error as Error).message}`) + return 1 + } + + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + this.output.error('Invalid params provided via stdin. Expected a JSON object.') + return 1 + } + + const parsedResult = cliSignatureParamsSchema.safeParse(parsed) + if (!parsedResult.success) { + this.output.error(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) + return 1 + } + + const parsedParams = parsedResult.data as Record + const existingAuth = + typeof parsedParams.auth === 'object' && + parsedParams.auth != null && + !Array.isArray(parsedParams.auth) + ? 
(parsedParams.auth as Record) + : {} + + params = { + ...parsedParams, + auth: { + ...existingAuth, + key: authKey, + }, + } + } + + const client = new Transloadit({ authKey, authSecret }) + try { + const signature = client.calcSignature(params as OptionalAuthParams, this.algorithm) + process.stdout.write(`${JSON.stringify(signature)}\n`) + } catch (error) { + this.output.error(`Failed to generate signature: ${(error as Error).message}`) + return 1 + } + + return undefined + } +} + +/** + * Generate a signed Smart CDN URL + */ +export class SmartCdnSignatureCommand extends UnauthenticatedCommand { + static override paths = [ + ['auth', 'smart-cdn'], + ['auth', 'smart_cdn'], + ['smart-cdn'], + ['smart_sig'], // BC alias + ] + + static override usage = Command.Usage({ + category: 'Auth', + description: 'Generate a signed Smart CDN URL', + details: ` + Read Smart CDN params JSON from stdin and output a signed URL. + Required fields: workspace, template, input + Optional fields: expire_at_ms, url_params + `, + examples: [ + [ + 'Generate Smart CDN URL', + 'echo \'{"workspace":"w","template":"t","input":"i"}\' | transloadit smart-cdn', + ], + [ + 'Using alias', + 'echo \'{"workspace":"w","template":"t","input":"i"}\' | transloadit smart_sig', + ], + ], + }) + + protected async run(): Promise { + const credentials = ensureCredentials() + if (credentials == null) { + this.output.error( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + return 1 + } + const { authKey, authSecret } = credentials + + const rawInput = await readStdin() + const input = rawInput.trim() + if (input === '') { + this.output.error( + 'Missing params provided via stdin. 
Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', + ) + return 1 + } + + let parsed: unknown + try { + parsed = JSON.parse(input) + } catch (error) { + this.output.error(`Failed to parse JSON from stdin: ${(error as Error).message}`) + return 1 + } + + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + this.output.error('Invalid params provided via stdin. Expected a JSON object.') + return 1 + } + + const parsedResult = smartCdnParamsSchema.safeParse(parsed) + if (!parsedResult.success) { + this.output.error(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) + return 1 + } + + const { + workspace, + template, + input: inputFieldRaw, + url_params, + expire_at_ms, + } = parsedResult.data + const urlParams = normalizeUrlParams(url_params as Record | undefined) + + let expiresAt: number | undefined + if (typeof expire_at_ms === 'string') { + const parsedNumber = Number.parseInt(expire_at_ms, 10) + if (Number.isNaN(parsedNumber)) { + this.output.error('Invalid params: expire_at_ms must be a number.') + return 1 + } + expiresAt = parsedNumber + } else { + expiresAt = expire_at_ms + } + + const inputField = typeof inputFieldRaw === 'string' ? 
inputFieldRaw : String(inputFieldRaw) + + const client = new Transloadit({ authKey, authSecret }) + try { + const signedUrl = client.getSignedSmartCDNUrl({ + workspace, + template, + input: inputField, + urlParams, + expiresAt, + }) + process.stdout.write(`${signedUrl}\n`) + } catch (error) { + this.output.error(`Failed to generate Smart CDN URL: ${(error as Error).message}`) + return 1 + } + + return undefined + } +} diff --git a/src/cli/commands/bills.ts b/src/cli/commands/bills.ts new file mode 100644 index 00000000..821b3be8 --- /dev/null +++ b/src/cli/commands/bills.ts @@ -0,0 +1,52 @@ +import { Command, Option } from 'clipanion' +import * as bills from '../bills.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +export class BillsGetCommand extends AuthenticatedCommand { + static override paths = [ + ['bills', 'get'], + ['bill', 'get'], + ['b', 'get'], + ['b', 'g'], + ] + + static override usage = Command.Usage({ + category: 'Bills', + description: 'Fetch billing information', + details: ` + Fetch billing information for the specified months. + Months should be specified in YYYY-MM format. + If no month is specified, returns the current month. 
+ `, + examples: [ + ['Get current month billing', 'transloadit bills get'], + ['Get specific month', 'transloadit bills get 2024-01'], + ['Get multiple months', 'transloadit bills get 2024-01 2024-02'], + ], + }) + + months = Option.Rest() + + protected async run(): Promise { + const monthList: string[] = [] + + for (const month of this.months) { + if (!/^\d{4}-\d{1,2}$/.test(month)) { + this.output.error(`invalid date format '${month}' (YYYY-MM)`) + return 1 + } + monthList.push(month) + } + + // Default to current month if none specified + if (monthList.length === 0) { + const d = new Date() + monthList.push(`${d.getUTCFullYear()}-${d.getUTCMonth() + 1}`) + } + + await bills.get(this.output, this.client, { + months: monthList, + }) + return undefined + } +} diff --git a/src/cli/commands/index.ts b/src/cli/commands/index.ts new file mode 100644 index 00000000..0e6e7fbd --- /dev/null +++ b/src/cli/commands/index.ts @@ -0,0 +1,64 @@ +import { Builtins, Cli } from 'clipanion' + +import { + AssembliesCreateCommand, + AssembliesDeleteCommand, + AssembliesGetCommand, + AssembliesListCommand, + AssembliesReplayCommand, +} from './assemblies.ts' + +import { SignatureCommand, SmartCdnSignatureCommand } from './auth.ts' + +import { BillsGetCommand } from './bills.ts' + +import { NotificationsListCommand, NotificationsReplayCommand } from './notifications.ts' + +import { + TemplatesCreateCommand, + TemplatesDeleteCommand, + TemplatesGetCommand, + TemplatesListCommand, + TemplatesModifyCommand, + TemplatesSyncCommand, +} from './templates.ts' + +export function createCli(): Cli { + const cli = new Cli({ + binaryLabel: 'Transloadit CLI', + binaryName: 'transloadit', + binaryVersion: '1.0.0', + }) + + // Built-in commands + cli.register(Builtins.HelpCommand) + cli.register(Builtins.VersionCommand) + + // Auth commands (signature generation) + cli.register(SignatureCommand) + cli.register(SmartCdnSignatureCommand) + + // Assemblies commands + 
cli.register(AssembliesCreateCommand) + cli.register(AssembliesListCommand) + cli.register(AssembliesGetCommand) + cli.register(AssembliesDeleteCommand) + cli.register(AssembliesReplayCommand) + + // Templates commands + cli.register(TemplatesCreateCommand) + cli.register(TemplatesGetCommand) + cli.register(TemplatesModifyCommand) + cli.register(TemplatesDeleteCommand) + cli.register(TemplatesListCommand) + cli.register(TemplatesSyncCommand) + + // Bills commands + cli.register(BillsGetCommand) + + // Notifications commands + cli.register(NotificationsReplayCommand) + cli.register(NotificationsListCommand) + + return cli +} diff --git a/src/cli/commands/notifications.ts b/src/cli/commands/notifications.ts new file mode 100644 index 00000000..183711db --- /dev/null +++ b/src/cli/commands/notifications.ts @@ -0,0 +1,89 @@ +import { Command, Option } from 'clipanion' +import * as notifications from '../notifications.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +export class NotificationsReplayCommand extends AuthenticatedCommand { + static override paths = [ + ['assembly-notifications', 'replay'], + ['notifications', 'replay'], + ['notification', 'replay'], + ['n', 'replay'], + ['n', 'r'], + ] + + static override usage = Command.Usage({ + category: 'Notifications', + description: 'Replay notifications for assemblies', + examples: [ + ['Replay notifications', 'transloadit assembly-notifications replay ASSEMBLY_ID'], + [ + 'Replay to a new URL', + 'transloadit assembly-notifications replay --notify-url https://example.com/notify ASSEMBLY_ID', + ], + ], + }) + + notifyUrl = Option.String('--notify-url', { + description: 'Specify a new URL to send the notifications to', + }) + + assemblyIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await notifications.replay(this.output, this.client, { + notify_url: this.notifyUrl, + assemblies: this.assemblyIds, + }) + return undefined + } +} + +export class NotificationsListCommand extends 
AuthenticatedCommand { + static override paths = [ + ['assembly-notifications', 'list'], + ['notifications', 'list'], + ['notification', 'list'], + ['n', 'list'], + ['n', 'l'], + ] + + static override usage = Command.Usage({ + category: 'Notifications', + description: 'List notifications matching given criteria', + details: ` + If ASSEMBLY is specified, return only notifications sent for that assembly. + `, + examples: [ + ['List all notifications', 'transloadit assembly-notifications list'], + ['List failed notifications', 'transloadit assembly-notifications list --failed'], + ['List for specific assembly', 'transloadit assembly-notifications list ASSEMBLY_ID'], + ], + }) + + failed = Option.Boolean('--failed', false, { + description: 'Return only failed notifications', + }) + + successful = Option.Boolean('--successful', false, { + description: 'Return only successful notifications', + }) + + assemblyId = Option.String({ required: false }) + + protected async run(): Promise { + if (this.failed && this.successful) { + this.output.error('assembly-notifications accepts at most one of --failed and --successful') + return 1 + } + + let type: string | undefined + if (this.failed) type = 'failed' + else if (this.successful) type = 'successful' + + await notifications.list(this.output, this.client, { + type, + assembly_id: this.assemblyId, + }) + return undefined + } +} diff --git a/src/cli/commands/templates.ts b/src/cli/commands/templates.ts new file mode 100644 index 00000000..5be53de3 --- /dev/null +++ b/src/cli/commands/templates.ts @@ -0,0 +1,226 @@ +import { Command, Option } from 'clipanion' +import * as templates from '../templates.ts' +import { AuthenticatedCommand } from './BaseCommand.ts' + +export class TemplatesCreateCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'create'], + ['template', 'create'], + ['t', 'create'], + ['t', 'c'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 
'Create a new template', + details: ` + Create a new template with the given name. + If FILE is not specified, reads from STDIN. + `, + examples: [ + ['Create template from file', 'transloadit templates create my-template steps.json'], + ['Create template from stdin', 'cat steps.json | transloadit templates create my-template'], + ], + }) + + name = Option.String({ required: true }) + file = Option.String({ required: false }) + + protected async run(): Promise { + await templates.create(this.output, this.client, { + name: this.name, + file: this.file ?? '-', + }) + return undefined + } +} + +export class TemplatesGetCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'get'], + ['template', 'get'], + ['t', 'get'], + ['t', 'g'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Retrieve the template content as JSON', + examples: [['Get a template', 'transloadit templates get TEMPLATE_ID']], + }) + + templateIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await templates.get(this.output, this.client, { + templates: this.templateIds, + }) + return undefined + } +} + +export class TemplatesModifyCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'modify'], + ['template', 'modify'], + ['t', 'modify'], + ['t', 'm'], + ['templates', 'edit'], + ['template', 'edit'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Change the JSON content of a template', + details: ` + Modify an existing template. + If FILE is not specified, reads from STDIN. 
+ `, + examples: [ + ['Modify template from file', 'transloadit templates modify TEMPLATE_ID steps.json'], + ['Rename a template', 'transloadit templates modify --name new-name TEMPLATE_ID'], + ], + }) + + newName = Option.String('--name,-n', { + description: 'A new name for the template', + }) + + templateId = Option.String({ required: true }) + file = Option.String({ required: false }) + + protected async run(): Promise { + await templates.modify(this.output, this.client, { + template: this.templateId, + name: this.newName, + file: this.file ?? '-', + }) + return undefined + } +} + +export class TemplatesDeleteCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'delete'], + ['template', 'delete'], + ['t', 'delete'], + ['t', 'd'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Delete templates', + examples: [['Delete a template', 'transloadit templates delete TEMPLATE_ID']], + }) + + templateIds = Option.Rest({ required: 1 }) + + protected async run(): Promise { + await templates.delete(this.output, this.client, { + templates: this.templateIds, + }) + return undefined + } +} + +export class TemplatesListCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'list'], + ['template', 'list'], + ['t', 'list'], + ['t', 'l'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'List templates matching given criteria', + examples: [ + ['List all templates', 'transloadit templates list'], + ['List templates sorted by name', 'transloadit templates list --sort name'], + ], + }) + + after = Option.String('--after,-a', { + description: 'Return only templates created after specified date', + }) + + before = Option.String('--before,-b', { + description: 'Return only templates created before specified date', + }) + + sort = Option.String('--sort', { + description: 'Field to sort by (id, name, created, or modified)', + }) + + order = 
Option.String('--order', { + description: 'Sort ascending or descending (asc or desc)', + }) + + fields = Option.String('--fields', { + description: 'Comma-separated list of fields to return for each template', + }) + + protected async run(): Promise { + if (this.sort && !['id', 'name', 'created', 'modified'].includes(this.sort)) { + this.output.error('invalid argument for --sort') + return 1 + } + + if (this.order && !['asc', 'desc'].includes(this.order)) { + this.output.error('invalid argument for --order') + return 1 + } + + const fieldList = this.fields ? this.fields.split(',') : undefined + + await templates.list(this.output, this.client, { + after: this.after, + before: this.before, + sort: this.sort, + order: this.order as 'asc' | 'desc' | undefined, + fields: fieldList, + }) + return undefined + } +} + +export class TemplatesSyncCommand extends AuthenticatedCommand { + static override paths = [ + ['templates', 'sync'], + ['template', 'sync'], + ['t', 'sync'], + ['t', 's'], + ] + + static override usage = Command.Usage({ + category: 'Templates', + description: 'Synchronize local template files with the Transloadit API', + details: ` + Template files must be named *.json and have the key "transloadit_template_id" + and optionally "steps". If "transloadit_template_id" is an empty string, then + a new template will be created using the instructions in "steps". If "steps" is + missing then it will be filled in by the instructions of the template specified + by "transloadit_template_id". If both keys are present then the local template + file and the remote template will be synchronized to whichever was more recently + modified. 
+ `, + examples: [ + ['Sync templates in a directory', 'transloadit templates sync templates/'], + ['Sync recursively', 'transloadit templates sync --recursive templates/'], + ], + }) + + recursive = Option.Boolean('--recursive,-r', false, { + description: 'Look for template files in directories recursively', + }) + + files = Option.Rest() + + protected async run(): Promise { + await templates.sync(this.output, this.client, { + recursive: this.recursive, + files: this.files, + }) + return undefined + } +} diff --git a/src/cli/helpers.ts b/src/cli/helpers.ts new file mode 100644 index 00000000..fd09641b --- /dev/null +++ b/src/cli/helpers.ts @@ -0,0 +1,60 @@ +import fs from 'node:fs' +import type { Readable } from 'node:stream' +import type { APIError } from './types.ts' +import { isAPIError } from './types.ts' + +export function createReadStream(file: string): Readable { + if (file === '-') return process.stdin + return fs.createReadStream(file) +} + +export function stream2buf(stream: Readable, cb: (err: Error | null, buf?: Buffer) => void): void { + let size = 0 + const bufs: Buffer[] = [] + + stream.on('error', cb) + + stream.on('readable', () => { + const chunk = stream.read() as Buffer | null + if (chunk === null) return + + size += chunk.length + bufs.push(chunk) + }) + + stream.on('end', () => { + const buf = Buffer.alloc(size) + let offset = 0 + + for (const b of bufs) { + b.copy(buf, offset) + offset += b.length + } + + cb(null, buf) + }) +} + +export function formatAPIError(err: unknown): string { + if (isAPIError(err)) { + return `${err.error}: ${err.message}` + } + if (err instanceof Error) { + return err.message + } + return String(err) +} + +// Re-export APIError type for convenience +export type { APIError } + +export function zip(listA: A[], listB: B[]): [A, B][] +export function zip(...lists: T[][]): T[][] +export function zip(...lists: T[][]): T[][] { + const length = Math.max(...lists.map((list) => list.length)) + const result: T[][] = new 
Array(length) + for (let i = 0; i < result.length; i++) { + result[i] = lists.map((list) => list[i] as T) + } + return result +} diff --git a/src/cli/notifications.ts b/src/cli/notifications.ts new file mode 100644 index 00000000..a0439730 --- /dev/null +++ b/src/cli/notifications.ts @@ -0,0 +1,36 @@ +import { tryCatch } from '../alphalib/tryCatch.ts' +import type { Transloadit } from '../Transloadit.ts' +import type { IOutputCtl } from './OutputCtl.ts' +import { ensureError } from './types.ts' + +export interface NotificationsReplayOptions { + notify_url?: string + assemblies: string[] +} + +export interface NotificationsListOptions { + type?: string + assembly_id?: string + pagesize?: number +} + +export async function replay( + output: IOutputCtl, + client: Transloadit, + { notify_url, assemblies }: NotificationsReplayOptions, +): Promise { + const promises = assemblies.map((id) => client.replayAssemblyNotification(id, { notify_url })) + const [err] = await tryCatch(Promise.all(promises)) + if (err) { + output.error(ensureError(err).message) + } +} + +export function list( + output: IOutputCtl, + _client: Transloadit, + { type: _type, assembly_id: _assembly_id }: NotificationsListOptions, +): Promise { + output.error('List notifications is not supported in this version') + return Promise.resolve() +} diff --git a/src/cli/template-last-modified.ts b/src/cli/template-last-modified.ts new file mode 100644 index 00000000..eae0718a --- /dev/null +++ b/src/cli/template-last-modified.ts @@ -0,0 +1,156 @@ +import type { Transloadit } from '../Transloadit.ts' +import { ensureError } from './types.ts' + +interface TemplateItem { + id: string + modified: string +} + +type FetchCallback = (err: Error | null, result?: T) => void +type PageFetcher = (page: number, pagesize: number, cb: FetchCallback) => void + +class MemoizedPagination { + private pagesize: number + private fetch: PageFetcher + private cache: (T | undefined)[] + + constructor(pagesize: number, fetch: 
PageFetcher) { + this.pagesize = pagesize + this.fetch = fetch + this.cache = [] + } + + get(i: number, cb: FetchCallback): void { + const cached = this.cache[i] + if (cached !== undefined) { + process.nextTick(() => cb(null, cached)) + return + } + + const page = Math.floor(i / this.pagesize) + 1 + const start = (page - 1) * this.pagesize + + this.fetch(page, this.pagesize, (err, result) => { + if (err) { + cb(err) + return + } + if (!result) { + cb(new Error('No result returned from fetch')) + return + } + for (let j = 0; j < this.pagesize; j++) { + this.cache[start + j] = result[j] + } + cb(null, this.cache[i]) + }) + } +} + +export default class ModifiedLookup { + private byOrdinal: MemoizedPagination + + constructor(client: Transloadit, pagesize = 50) { + this.byOrdinal = new MemoizedPagination(pagesize, (page, pagesize, cb) => { + const params = { + sort: 'id' as const, + order: 'asc' as const, + fields: ['id', 'modified'] as ('id' | 'modified')[], + page, + pagesize, + } + + client + .listTemplates(params) + .then((result) => { + const items: TemplateItem[] = new Array(pagesize) + // Fill with sentinel value larger than any hex ID + items.fill({ id: 'gggggggggggggggggggggggggggggggg', modified: '' }) + for (let i = 0; i < result.items.length; i++) { + const item = result.items[i] + if (item) { + items[i] = { id: item.id, modified: item.modified } + } + } + cb(null, items) + }) + .catch((err: unknown) => { + cb(ensureError(err)) + }) + }) + } + + private idByOrd(ord: number, cb: FetchCallback): void { + this.byOrdinal.get(ord, (err, result) => { + if (err) { + cb(err) + return + } + if (!result) { + cb(new Error('No result found')) + return + } + cb(null, result.id) + }) + } + + byId(id: string, cb: FetchCallback): void { + const findUpperBound = (bound: number): void => { + this.idByOrd(bound, (err, idAtBound) => { + if (err) { + cb(err) + return + } + if (idAtBound === id) { + complete(bound) + return + } + if (idAtBound && idAtBound > id) { + 
refine(Math.floor(bound / 2), bound) + return + } + findUpperBound(bound * 2) + }) + } + + const refine = (lower: number, upper: number): void => { + if (lower >= upper - 1) { + cb(new Error(`Template ID ${id} not found in ModifiedLookup`)) + return + } + + const middle = Math.floor((lower + upper) / 2) + this.idByOrd(middle, (err, idAtMiddle) => { + if (err) { + cb(err) + return + } + if (idAtMiddle === id) { + complete(middle) + return + } + if (idAtMiddle && idAtMiddle < id) { + refine(middle, upper) + return + } + refine(lower, middle) + }) + } + + const complete = (ord: number): void => { + this.byOrdinal.get(ord, (err, result) => { + if (err) { + cb(err) + return + } + if (!result) { + cb(new Error('No result found')) + return + } + cb(null, new Date(result.modified)) + }) + } + + findUpperBound(1) + } +} diff --git a/src/cli/templates.ts b/src/cli/templates.ts new file mode 100644 index 00000000..db8eadfc --- /dev/null +++ b/src/cli/templates.ts @@ -0,0 +1,342 @@ +import fsp from 'node:fs/promises' +import path from 'node:path' +import { promisify } from 'node:util' +import rreaddir from 'recursive-readdir' +import { z } from 'zod' +import { tryCatch } from '../alphalib/tryCatch.ts' +import type { TemplateContent } from '../apiTypes.ts' +import type { Transloadit } from '../Transloadit.ts' +import { createReadStream, formatAPIError, stream2buf } from './helpers.ts' +import type { IOutputCtl } from './OutputCtl.ts' +import ModifiedLookup from './template-last-modified.ts' +import type { TemplateFile } from './types.ts' +import { ensureError, isTransloaditAPIError, TemplateFileDataSchema } from './types.ts' + +const rreaddirAsync = promisify(rreaddir) + +export interface TemplateCreateOptions { + name: string + file: string +} + +export interface TemplateGetOptions { + templates: string[] +} + +export interface TemplateModifyOptions { + template: string + name?: string + file: string +} + +export interface TemplateDeleteOptions { + templates: string[] +} + 
+export interface TemplateListOptions { + before?: string + after?: string + order?: 'asc' | 'desc' + sort?: string + fields?: string[] +} + +export interface TemplateSyncOptions { + files: string[] + recursive?: boolean +} + +const StepsSchema = z.record(z.string(), z.unknown()) + +export async function create( + output: IOutputCtl, + client: Transloadit, + { name, file }: TemplateCreateOptions, +): Promise { + try { + const buf = await new Promise((resolve, reject) => { + stream2buf(createReadStream(file), (err, buf) => { + if (err) reject(err) + else if (buf) resolve(buf) + else reject(new Error('No buffer received')) + }) + }) + + const parsed: unknown = JSON.parse(buf.toString()) + const validated = StepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error('Invalid template steps format') + } + + const result = await client.createTemplate({ + name, + template: { steps: validated.data } as TemplateContent, + }) + output.print(result.id, result) + return result + } catch (err) { + const error = ensureError(err) + output.error(error.message) + throw err + } +} + +export async function get( + output: IOutputCtl, + client: Transloadit, + { templates }: TemplateGetOptions, +): Promise { + const requests = templates.map((template) => client.getTemplate(template)) + + const [err, results] = await tryCatch(Promise.all(requests)) + if (err) { + output.error(formatAPIError(err)) + throw err + } + + for (const result of results) { + output.print(result, result) + } +} + +export async function modify( + output: IOutputCtl, + client: Transloadit, + { template, name, file }: TemplateModifyOptions, +): Promise { + try { + const buf = await new Promise((resolve, reject) => { + stream2buf(createReadStream(file), (err, buf) => { + if (err) reject(err) + else if (buf) resolve(buf) + else reject(new Error('No buffer received')) + }) + }) + + let json: Record | null = null + let newName = name + + if (buf.length > 0) { + const parsed: unknown = 
JSON.parse(buf.toString()) + const validated = StepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error('Invalid template steps format') + } + json = validated.data + } + + if (!name || buf.length === 0) { + const tpl = await client.getTemplate(template) + if (!name) newName = tpl.name + if (buf.length === 0) { + const stepsContent = tpl.content.steps + if (stepsContent && typeof stepsContent === 'object') { + json = stepsContent as Record + } + } + } + + if (json === null) { + throw new Error('No steps to update template with') + } + + await client.editTemplate(template, { + name: newName, + template: { steps: json } as TemplateContent, + }) + } catch (err) { + output.error(formatAPIError(err)) + throw err + } +} + +async function _delete( + output: IOutputCtl, + client: Transloadit, + { templates }: TemplateDeleteOptions, +): Promise { + await Promise.all( + templates.map(async (template) => { + const [err] = await tryCatch(client.deleteTemplate(template)) + if (err) { + output.error(formatAPIError(err)) + throw err + } + }), + ) +} +export { _delete as delete } + +const TemplateIdSchema = z.object({ + id: z.string(), +}) + +export function list( + output: IOutputCtl, + client: Transloadit, + { before, after, order, sort, fields }: TemplateListOptions, +): void { + const stream = client.streamTemplates({ + todate: before, + fromdate: after, + order, + sort: sort as 'id' | 'name' | 'created' | 'modified' | undefined, + }) + + stream.on('readable', () => { + const template: unknown = stream.read() + if (template == null) return + + const parsed = TemplateIdSchema.safeParse(template) + if (!parsed.success) return + + if (fields == null) { + output.print(parsed.data.id, template) + } else { + const templateRecord = template as Record + output.print(fields.map((field) => templateRecord[field]).join(' '), template) + } + }) + + stream.on('error', (err: unknown) => { + output.error(formatAPIError(err)) + }) +} + +export async function sync( + output: 
IOutputCtl, + client: Transloadit, + { files, recursive }: TemplateSyncOptions, +): Promise { + // Promise [String] -- all files in the directory tree + const relevantFilesNested = await Promise.all( + files.map(async (file) => { + const stats = await fsp.stat(file) + if (!stats.isDirectory()) return [file] + + let children: string[] + if (recursive) { + children = (await rreaddirAsync(file)) as string[] + } else { + const list = await fsp.readdir(file) + children = list.map((child) => path.join(file, child)) + } + + if (recursive) return children + + // Filter directories if not recursive + const filtered = await Promise.all( + children.map(async (child) => { + const childStats = await fsp.stat(child) + return childStats.isDirectory() ? null : child + }), + ) + return filtered.filter((f): f is string => f !== null) + }), + ) + const relevantFiles = relevantFilesNested.flat() + + // Promise [{ file: String, data: JSON }] -- all templates + const maybeFiles = await Promise.all(relevantFiles.map(templateFileOrNull)) + const templates = maybeFiles.filter((maybeFile): maybeFile is TemplateFile => maybeFile !== null) + + async function templateFileOrNull(file: string): Promise { + if (path.extname(file) !== '.json') return null + + try { + const data = await fsp.readFile(file, 'utf8') + const parsed: unknown = JSON.parse(data) + const validated = TemplateFileDataSchema.safeParse(parsed) + if (!validated.success) return null + return 'transloadit_template_id' in validated.data ? 
{ file, data: validated.data } : null + } catch (e) { + if (e instanceof SyntaxError) return null + throw e + } + } + + const modified = new ModifiedLookup(client) + + const [err] = await tryCatch( + Promise.all( + templates.map(async (template) => { + if (!('steps' in template.data)) { + if (!template.data.transloadit_template_id) { + throw new Error(`Template file has no id and no steps: ${template.file}`) + } + return download(template) + } + + if (!template.data.transloadit_template_id) return upload(template) + + const stats = await fsp.stat(template.file) + const fileModified = stats.mtime + + let templateModified: Date + const templateId = template.data.transloadit_template_id + try { + await client.getTemplate(templateId) + templateModified = await new Promise((resolve, reject) => + modified.byId(templateId, (err, res) => { + if (err) { + reject(err) + } else if (res) { + resolve(res) + } else { + reject(new Error('No date returned')) + } + }), + ) + } catch (err) { + if (isTransloaditAPIError(err)) { + if (err.code === 'SERVER_404' || (err.response && err.response.statusCode === 404)) { + throw new Error(`Template file references nonexistent template: ${template.file}`) + } + } + throw err + } + + if (fileModified > templateModified) return upload(template) + return download(template) + }), + ), + ) + if (err) { + output.error(err) + throw err + } + + async function upload(template: TemplateFile): Promise { + const params = { + name: path.basename(template.file, '.json'), + template: { steps: template.data.steps } as TemplateContent, + } + + if (!template.data.transloadit_template_id) { + const result = await client.createTemplate(params) + template.data.transloadit_template_id = result.id + await fsp.writeFile(template.file, JSON.stringify(template.data)) + return + } + + await client.editTemplate(template.data.transloadit_template_id, params) + } + + async function download(template: TemplateFile): Promise { + const templateId = 
template.data.transloadit_template_id + if (!templateId) { + throw new Error('Cannot download template without id') + } + + const result = await client.getTemplate(templateId) + + template.data.steps = result.content as Record + const file = path.join(path.dirname(template.file), `${result.name}.json`) + + await fsp.writeFile(template.file, JSON.stringify(template.data)) + + if (file !== template.file) { + await fsp.rename(template.file, file) + } + } +} diff --git a/src/cli/types.ts b/src/cli/types.ts new file mode 100644 index 00000000..98deeae8 --- /dev/null +++ b/src/cli/types.ts @@ -0,0 +1,176 @@ +import { z } from 'zod' +import type { BillResponse, ListedTemplate, TemplateResponse } from '../apiTypes.ts' +import type { AssemblyStatus, Transloadit } from '../Transloadit.ts' +import type { IOutputCtl } from './OutputCtl.ts' + +// Re-export transloadit types for CLI use +export type { AssemblyStatus, BillResponse, ListedTemplate, TemplateResponse } +export type { Transloadit } +export type { CreateAssemblyOptions } from '../Transloadit.ts' + +// Zod schemas for runtime validation +export const APIErrorSchema = z.object({ + error: z.string(), + message: z.string(), +}) +export type APIError = z.infer + +export const TransloaditAPIErrorSchema = z.object({ + error: z.string().optional(), + message: z.string(), + code: z.string().optional(), + transloaditErrorCode: z.string().optional(), + response: z + .object({ + body: z + .object({ + error: z.string().optional(), + }) + .optional(), + statusCode: z.number().optional(), + }) + .optional(), +}) +export type TransloaditAPIError = z.infer + +// Template file data +export const TemplateFileDataSchema = z + .object({ + transloadit_template_id: z.string().optional(), + steps: z.record(z.string(), z.unknown()).optional(), + }) + .passthrough() +export type TemplateFileData = z.infer + +export interface TemplateFile { + file: string + data: TemplateFileData +} + +// Template list item (from API) +export interface 
TemplateListItem { + id: string + modified: string + name?: string +} + +// CLI Invocation types +export interface BaseInvocation { + error?: boolean + message?: string + mode: string + action?: string + logLevel?: number + jsonMode?: boolean +} + +export interface AssemblyInvocation extends BaseInvocation { + mode: 'assemblies' + action?: 'create' | 'get' | 'list' | 'delete' | 'replay' + inputs: string[] + output?: string + recursive?: boolean + watch?: boolean + del?: boolean + reprocessStale?: boolean + steps?: string + template?: string + fields?: Record + assemblies?: string[] + before?: string + after?: string + keywords?: string[] + notify_url?: string + reparse?: boolean +} + +export interface TemplateInvocation extends BaseInvocation { + mode: 'templates' + action?: 'create' | 'get' | 'list' | 'delete' | 'modify' | 'sync' + templates?: string[] + template?: string + name?: string + file?: string + files?: string[] + before?: string + after?: string + order?: 'asc' | 'desc' + sort?: string + fields?: string[] + recursive?: boolean +} + +export interface BillInvocation extends BaseInvocation { + mode: 'bills' + action?: 'get' + months: string[] +} + +export interface NotificationInvocation extends BaseInvocation { + mode: 'assembly-notifications' + action?: 'list' | 'replay' + assemblies?: string[] + notify_url?: string + type?: string + assembly_id?: string + pagesize?: number +} + +export interface HelpInvocation extends BaseInvocation { + mode: 'help' | 'version' | 'register' +} + +export type Invocation = + | AssemblyInvocation + | TemplateInvocation + | BillInvocation + | NotificationInvocation + | HelpInvocation + +// Command handler type +export type CommandHandler = ( + output: IOutputCtl, + client: Transloadit | undefined, + invocation: T, +) => void | Promise + +// Type guard for Error +export function isError(value: unknown): value is Error { + return value instanceof Error +} + +// Helper to ensure error is Error type +export function 
ensureError(value: unknown): Error { + if (value instanceof Error) { + return value + } + return new Error(`Non-error was thrown: ${String(value)}`) +} + +// Type guard for APIError +export function isAPIError(value: unknown): value is APIError { + return APIErrorSchema.safeParse(value).success +} + +// Type guard for TransloaditAPIError +export function isTransloaditAPIError(value: unknown): value is TransloaditAPIError { + return TransloaditAPIErrorSchema.safeParse(value).success +} + +// Type guard for NodeJS.ErrnoException +export function isErrnoException(value: unknown): value is NodeJS.ErrnoException { + return value instanceof Error && 'code' in value +} + +// Safe array access helper +export function safeGet(arr: T[], index: number): T | undefined { + return arr[index] +} + +// Assert defined helper +export function assertDefined(value: T | undefined | null, message: string): T { + if (value === undefined || value === null) { + throw new Error(message) + } + return value +} diff --git a/test/e2e/OutputCtl.ts b/test/e2e/OutputCtl.ts new file mode 100644 index 00000000..0fefa189 --- /dev/null +++ b/test/e2e/OutputCtl.ts @@ -0,0 +1,48 @@ +import type { OutputCtlOptions } from '../../src/cli/OutputCtl.ts' + +interface OutputEntry { + type: 'error' | 'warn' | 'info' | 'debug' | 'print' + msg: unknown + json?: unknown +} + +/** + * Test version of OutputCtl that captures output for verification + * instead of writing to console. Implements the same interface as src/cli/OutputCtl. 
+ */ +export default class OutputCtl { + private output: OutputEntry[] + // These properties are required by the src/cli/OutputCtl interface but not used in tests + private json: boolean + private logLevel: number + + constructor({ logLevel = 0, jsonMode = false }: OutputCtlOptions = {}) { + this.output = [] + this.json = jsonMode + this.logLevel = logLevel + } + + error(msg: unknown): void { + this.output.push({ type: 'error', msg }) + } + + warn(msg: unknown): void { + this.output.push({ type: 'warn', msg }) + } + + info(msg: unknown): void { + this.output.push({ type: 'info', msg }) + } + + debug(msg: unknown): void { + this.output.push({ type: 'debug', msg }) + } + + print(msg: unknown, json?: unknown): void { + this.output.push({ type: 'print', msg, json }) + } + + get(debug = false): OutputEntry[] { + return this.output.filter((line) => debug || line.type !== 'debug') + } +} diff --git a/test/e2e/assemblies.test.ts b/test/e2e/assemblies.test.ts new file mode 100644 index 00000000..b988dbba --- /dev/null +++ b/test/e2e/assemblies.test.ts @@ -0,0 +1,437 @@ +import fsp from 'node:fs/promises' +import process from 'node:process' +import { promisify } from 'node:util' +import { imageSize } from 'image-size' +import rreaddir from 'recursive-readdir' +import { describe, expect, it } from 'vitest' +import * as assemblies from '../../src/cli/assemblies.ts' +import assembliesCreate from '../../src/cli/assemblies-create.ts' +import { zip } from '../../src/cli/helpers.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +const rreaddirAsync = promisify(rreaddir) + +describe('assemblies', () => { + describe('get', () => { + it( + 'should get assemblies', + testCase(async (client) => { + const response = await client.listAssemblies({ + pagesize: 5, + type: 'completed', + }) + const assemblyList = response.items + if (assemblyList.length === 0) throw new Error('account has no assemblies 
to fetch') + + const expectations = await Promise.all( + assemblyList.map((assembly) => client.getAssembly(assembly.id)), + ) + + const actuals = await Promise.all( + assemblyList.map(async (assembly) => { + const output = new OutputCtl() + await assemblies.get(output, client, { assemblies: [assembly.id] }) + return output.get() as OutputEntry[] + }), + ) + + for (const [expectation, actual] of zip(expectations, actuals)) { + expect(actual).to.have.lengthOf(1) + expect(actual).to.have.nested.property('[0].type').that.equals('print') + expect(actual).to.have.nested.property('[0].json').that.deep.equals(expectation) + } + }), + ) + + it( + 'should return assemblies in the order specified', + testCase(async (client) => { + const response = await client.listAssemblies({ pagesize: 5 }) + const assemblyList = response.items.sort(() => 2 * Math.floor(Math.random() * 2) - 1) + if (assemblyList.length === 0) throw new Error('account has no assemblies to fetch') + + const ids = assemblyList.map((assembly) => assembly.id) + + const output = new OutputCtl() + await assemblies.get(output, client, { assemblies: ids }) + const results = output.get() as OutputEntry[] + + try { + expect(results).to.have.lengthOf(ids.length) + } catch (e) { + console.error('DEBUG: Results:', JSON.stringify(results, null, 2)) + console.error('DEBUG: Ids:', JSON.stringify(ids, null, 2)) + throw e + } + for (const [result, id] of zip(results, ids)) { + expect(result).to.have.property('type').that.equals('print') + expect(result).to.have.nested.property('json.assembly_id').that.equals(id) + } + }), + ) + }) + + describe('list', () => { + it( + 'should list assemblies', + testCase(async (client) => { + const output = new OutputCtl() + await assemblies.list(output, client, { pagesize: 1 }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + }), + ) + }) + + describe('delete', () => { + it( + 'should delete assemblies', + testCase(async 
(client) => { + const assembly = await client.createAssembly({ + params: { + steps: { import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' } }, + }, + }) + + const output = new OutputCtl() + const assemblyId = assembly.assembly_id as string + await assemblies.delete(output, client, { assemblies: [assemblyId] }) + const res = await client.getAssembly(assemblyId) + expect(res.ok).to.equal('ASSEMBLY_CANCELED') + }), + ) + }) + + describe('replay', () => { + it( + 'should replay assemblies', + testCase(async (client) => { + const assembly = await client.createAssembly({ + params: { + steps: { import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' } }, + }, + }) + + const output = new OutputCtl() + const assemblyId = assembly.assembly_id as string + await assemblies.replay(output, client, { + assemblies: [assemblyId], + steps: undefined, + }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + }), + ) + }) + + describe('create', () => { + const genericImg = 'https://placehold.co/100.jpg' + + async function imgPromise(fname = 'in.jpg'): Promise { + const response = await fetch(genericImg) + if (!response.ok) { + throw new Error(`Failed to fetch image: ${response.status}`) + } + const buffer = Buffer.from(await response.arrayBuffer()) + await fsp.writeFile(fname, buffer) + return fname + } + + const genericSteps = { + resize: { + robot: '/image/resize', + use: ':original', + result: true, + width: 130, + height: 130, + }, + } + + async function stepsPromise( + _fname = 'steps.json', + steps: Record = genericSteps, + ): Promise { + await fsp.writeFile('steps.json', JSON.stringify(steps)) + return 'steps.json' + } + + it( + 'should transcode a file', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out.jpg', + 
}) + const result = output.get(true) as OutputEntry[] + + expect(result.length).to.be.at.least(3) + const msgs = result.map((r) => r.msg) + expect(msgs).to.include('GOT JOB in.jpg out.jpg') + expect(msgs).to.include('DOWNLOADING') + expect(msgs).to.include('COMPLETED in.jpg out.jpg') + + const imgBuffer = await fsp.readFile('out.jpg') + const dim = imageSize(new Uint8Array(imgBuffer)) + expect(dim).to.have.property('width').that.equals(130) + expect(dim).to.have.property('height').that.equals(130) + }), + ) + + it( + 'should handle multiple inputs', + testCase(async (client) => { + const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + }) + + const outs = await fsp.readdir('out') + expect(outs[0]).to.equal('in1.jpg') + expect(outs[1]).to.equal('in2.jpg') + expect(outs[2]).to.equal('in3.jpg') + expect(outs).to.have.lengthOf(3) + }), + ) + + it( + 'should not output outside outdir', + testCase(async (client) => { + await fsp.mkdir('sub') + process.chdir('sub') + + const infile = await imgPromise('../in.jpg') + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out', + }) + + const outs = await fsp.readdir('out') + expect(outs[0]).to.equal('in.jpg') + expect(outs).to.have.lengthOf(1) + + const ls = await fsp.readdir('.') + expect(ls).to.not.contain('in.jpg') + }), + ) + + it( + 'should structure output directory correctly', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['1.jpg', 'in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + recursive: 
true, + steps, + inputs: ['1.jpg', 'in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/1.jpg') + expect(outs).to.include('out/2.jpg') + expect(outs).to.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(3) + }), + ) + + it( + 'should not be recursive by default', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: ['in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/2.jpg') + expect(outs).to.not.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(1) + }), + ) + + it( + 'should be able to handle directories recursively', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + recursive: true, + steps, + inputs: ['in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/2.jpg') + expect(outs).to.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(2) + }), + ) + + it( + 'should detect outdir conflicts', + testCase(async (client) => { + await fsp.mkdir('in') + await Promise.all(['1.jpg', 'in/1.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + try { + await assembliesCreate(output, client, { + steps, + inputs: ['1.jpg', 'in'], + output: 'out', + }) + throw new Error('assembliesCreate didnt err; should have') + } catch (_err) { + const result = output.get() as OutputEntry[] + expect(result[result.length - 1]) + .to.have.property('type') + 
.that.equals('error') + expect(result[result.length - 1]) + .to.have.nested.property('msg.message') + .that.equals("Output collision between 'in/1.jpg' and '1.jpg'") + } + // Allow time for any pending file streams to settle/error before test cleanup + await new Promise((resolve) => setTimeout(resolve, 100)) + }), + ) + + it( + 'should not download the result if no output is specified', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: null, + }) + const result = output.get(true) as OutputEntry[] + + // When no output is specified, we might still get debug messages but no actual downloads + const downloadingMsgs = result.filter((line) => String(line.msg) === 'DOWNLOADING') + expect(downloadingMsgs.length).to.be.lessThanOrEqual(1) + }), + ) + + it( + 'should accept invocations with no inputs', + testCase(async (client) => { + await imgPromise() + const steps = await stepsPromise('steps.json', { + import: { + robot: '/http/import', + url: genericImg, + }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 130, + height: 130, + }, + }) + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [], + output: 'out.jpg', + }) + + await fsp.access('out.jpg') + }), + ) + + it( + 'should allow deleting inputs after processing', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: null, + del: true, + }) + + try { + await fsp.access(infile) + throw new Error('File should have been deleted') + } catch (err) { + expect((err as NodeJS.ErrnoException).code).to.equal('ENOENT') + } + }), + ) + + it( + 'should not reprocess inputs that are older than their output', + testCase(async 
(client) => { + const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output1 = new OutputCtl() + await assembliesCreate(output1, client, { + steps, + inputs: [infiles[0] as string], + output: 'out', + }) + + const output2 = new OutputCtl() + await assembliesCreate(output2, client, { + steps, + inputs: infiles, + output: 'out', + }) + const result = output2.get(true) as OutputEntry[] + + expect( + result.map((line) => line.msg).filter((msg) => String(msg).includes('in1.jpg')), + ).to.have.lengthOf(0) + }), + ) + }) +}) diff --git a/test/e2e/bills.test.ts b/test/e2e/bills.test.ts new file mode 100644 index 00000000..a4cc597d --- /dev/null +++ b/test/e2e/bills.test.ts @@ -0,0 +1,22 @@ +import { describe, expect, it } from 'vitest' +import * as bills from '../../src/cli/bills.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +describe('bills', () => { + describe('get', () => { + it( + 'should get bills', + testCase(async (client) => { + const output = new OutputCtl() + const date = new Date() + const month = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}` + await bills.get(output, client, { months: [month] }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + expect(logs.filter((l) => l.type === 'print')).to.have.length.above(0) + }), + ) + }) +}) diff --git a/test/e2e/cli.test.ts b/test/e2e/cli.test.ts new file mode 100644 index 00000000..0b9cfe90 --- /dev/null +++ b/test/e2e/cli.test.ts @@ -0,0 +1,10 @@ +import { describe, expect, it } from 'vitest' +import { runCli } from './test-utils.ts' + +describe('CLI', () => { + it('should list templates via CLI', async () => { + const { stdout, stderr } = await runCli('templates list') + expect(stderr).to.be.empty + 
expect(stdout).to.match(/[a-f0-9]{32}/) + }) +}) diff --git a/test/e2e/notifications.test.ts b/test/e2e/notifications.test.ts new file mode 100644 index 00000000..9a550dfc --- /dev/null +++ b/test/e2e/notifications.test.ts @@ -0,0 +1,20 @@ +import { describe, expect, it } from 'vitest' +import * as notifications from '../../src/cli/notifications.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +describe('assembly-notifications', () => { + describe('list', () => { + // Skipped: notifications.list is not implemented in the SDK + it.skip( + 'should list notifications', + testCase(async (client) => { + const output = new OutputCtl() + await notifications.list(output, client, { pagesize: 1 }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + }), + ) + }) +}) diff --git a/test/e2e/templates.test.ts b/test/e2e/templates.test.ts new file mode 100644 index 00000000..cd4a3566 --- /dev/null +++ b/test/e2e/templates.test.ts @@ -0,0 +1,325 @@ +import fsp from 'node:fs/promises' +import path from 'node:path' +import { afterAll, beforeAll, describe, expect, it } from 'vitest' +import type { TemplateContent } from '../../src/apiTypes.ts' +import { zip } from '../../src/cli/helpers.ts' +import * as templates from '../../src/cli/templates.ts' +import { Transloadit as TransloaditClient } from '../../src/Transloadit.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { authKey, authSecret, delay, testCase } from './test-utils.ts' + +describe('templates', () => { + describe('create', () => { + it( + 'should create templates', + testCase(async (client) => { + const executions = [1, 2, 3, 4, 5].map(async (n) => { + const output = new OutputCtl() + await fsp.writeFile(`${n}.json`, JSON.stringify({ testno: n })) + await templates.create(output, client, { name: `test-${n}`, file: 
`${n}.json` }) + return output.get() as OutputEntry[] + }) + + const results = await Promise.all(executions) + for (const result of results) { + expect(result).to.have.lengthOf(1) + expect(result).to.have.nested.property('[0].type').that.equals('print') + expect(result).to.have.nested.property('[0].msg').that.equals(result[0]?.json?.id) + + if (result[0]?.json?.id) { + await client.deleteTemplate(result[0].json.id).catch(() => {}) + } + } + }), + ) + }) + + describe('get', () => { + it( + 'should get templates', + testCase(async (client) => { + const response = await client.listTemplates({ pagesize: 5 }) + const templatesList = response.items + if (templatesList.length === 0) throw new Error('account has no templates to fetch') + + const expectations = await Promise.all( + templatesList.map((template) => client.getTemplate(template.id)), + ) + + const actuals = await Promise.all( + templatesList.map(async (template) => { + const output = new OutputCtl() + await templates.get(output, client, { templates: [template.id] }) + return output.get() as OutputEntry[] + }), + ) + + for (const [expectation, actual] of zip(expectations, actuals)) { + expect(actual).to.have.lengthOf(1) + expect(actual).to.have.nested.property('[0].type').that.equals('print') + expect(actual).to.have.nested.property('[0].json').that.deep.equals(expectation) + } + }), + ) + + it( + 'should return templates in the order specified', + testCase(async (client) => { + const response = await client.listTemplates({ pagesize: 5 }) + const items = response.items.sort(() => 2 * Math.floor(Math.random() * 2) - 1) + if (items.length === 0) throw new Error('account has no templates to fetch') + + const ids = items.map((template) => template.id) + + const output = new OutputCtl() + await templates.get(output, client, { templates: ids }) + const results = output.get() as OutputEntry[] + + expect(results).to.have.lengthOf(ids.length) + for (const [result, id] of zip(results, ids)) { + 
expect(result).to.have.property('type').that.equals('print') + expect(result).to.have.nested.property('json.id').that.equals(id) + } + }), + ) + }) + + describe('modify', () => { + let templateId: string + + beforeAll(async () => { + const client = new TransloaditClient({ authKey, authSecret }) + const response = await client.createTemplate({ + name: 'original-name', + template: { + steps: { dummy: { robot: '/html/convert', url: 'https://example.com' } }, + } as TemplateContent, + }) + templateId = response.id + }) + + it( + 'should modify but not rename the template', + testCase(async (client) => { + await fsp.writeFile('template.json', JSON.stringify({ stage: 1 })) + + const output = new OutputCtl() + await templates.modify(output, client, { + template: templateId, + file: 'template.json', + }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await delay(2000) + const template = await client.getTemplate(templateId) + expect(template).to.have.property('name').that.equals('original-name') + expect(template).to.have.property('content').that.has.property('steps') + }), + ) + + it( + 'should not modify but rename the template', + testCase(async (client) => { + await fsp.writeFile('template.json', '') + + const output = new OutputCtl() + await templates.modify(output, client, { + template: templateId, + name: 'new-name', + file: 'template.json', + }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await delay(2000) + const template = await client.getTemplate(templateId) + expect(template).to.have.property('name').that.equals('new-name') + expect(template).to.have.property('content').that.has.property('steps') + }), + ) + + it( + 'should modify and rename the template', + testCase(async (client) => { + await fsp.writeFile('template.json', JSON.stringify({ stage: 2 })) + + const output = new OutputCtl() + await templates.modify(output, client, { + template: templateId, + name: 'newer-name', + file: 'template.json', + }) + 
const result = output.get() + + expect(result).to.have.lengthOf(0) + await delay(2000) + const template = await client.getTemplate(templateId) + expect(template).to.have.property('name').that.equals('newer-name') + expect(template).to.have.property('content').that.has.property('steps') + }), + ) + + afterAll(async () => { + const client = new TransloaditClient({ authKey, authSecret }) + await client.deleteTemplate(templateId) + }) + }) + + describe('delete', () => { + it( + 'should delete templates', + testCase(async (client) => { + const ids = await Promise.all( + [1, 2, 3, 4, 5].map(async (n) => { + const response = await client.createTemplate({ + name: `delete-test-${n}`, + template: { + steps: { dummy: { robot: '/html/convert', url: `https://example.com/${n}` } }, + } as TemplateContent, + }) + return response.id + }), + ) + + const output = new OutputCtl() + await templates.delete(output, client, { templates: ids }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + await Promise.all( + ids.map(async (id) => { + try { + const response = await client.getTemplate(id) + expect(response).to.not.exist + } catch (err) { + const error = err as { + code?: string + transloaditErrorCode?: string + response?: { body?: { error?: string } } + } + const errorCode = + error.code || error.transloaditErrorCode || error.response?.body?.error + if (errorCode !== 'TEMPLATE_NOT_FOUND') { + console.error('Delete failed with unexpected error:', err, 'Code:', errorCode) + throw err + } + } + }), + ) + }), + ) + }) + + describe('sync', () => { + it( + 'should handle directories recursively', + testCase(async (client) => { + const response = await client.listTemplates({ pagesize: 5 }) + const templateIds = response.items.map((item) => ({ id: item.id, name: item.name })) + + let dirname = 'd' + const files: string[] = [] + for (const { id, name } of templateIds) { + const fname = path.join(dirname, `${name}.json`) + await fsp.mkdir(dirname, { recursive: true }) + 
await fsp.writeFile(fname, `{"transloadit_template_id":"${id}"}`) + files.push(fname) + dirname = path.join(dirname, 'd') + } + + const output = new OutputCtl() + await templates.sync(output, client, { recursive: true, files: ['d'] }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + const contents = await Promise.all( + files.map( + async (file) => JSON.parse(await fsp.readFile(file, 'utf8')) as Record, + ), + ) + for (const [content, idObj] of zip(contents, templateIds)) { + expect(content).to.have.property('transloadit_template_id').that.equals(idObj.id) + expect(content).to.have.property('steps') + } + }), + ) + + it( + 'should update local files when outdated', + testCase(async (client) => { + const params = { + name: `test-local-update-${Date.now()}`, + template: { + steps: { dummy: { robot: '/html/convert', url: 'https://example.com/changed' } }, + } as TemplateContent, + } + const response = await client.createTemplate(params) + const id = response.id + + try { + const fname = `${params.name}.json` + await fsp.writeFile( + fname, + JSON.stringify({ + transloadit_template_id: id, + steps: { changed: false }, + }), + ) + await fsp.utimes(fname, 0, 0) + + const output = new OutputCtl() + await templates.sync(output, client, { files: [fname] }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + const content = JSON.parse(await fsp.readFile(fname, 'utf8')) as Record + expect(content).to.have.property('steps') + const fetchedTemplate = await client.getTemplate(id) + expect(fetchedTemplate).to.have.property('content').that.has.property('steps') + } finally { + await client.deleteTemplate(id).catch(() => {}) + } + }), + ) + + it( + 'should update remote template when outdated', + testCase(async (client) => { + const params = { + name: `test-remote-update-${Date.now()}`, + template: { + steps: { dummy: { robot: '/html/convert', url: 'https://example.com/unchanged' } }, + } as TemplateContent, + } + const response = await 
client.createTemplate(params) + const id = response.id + + try { + const fname = `${params.name}.json` + await fsp.writeFile( + fname, + JSON.stringify({ + transloadit_template_id: id, + steps: { changed: true }, + }), + ) + await fsp.utimes(fname, Date.now() * 2, Date.now() * 2) + + const output = new OutputCtl() + await templates.sync(output, client, { files: [fname] }) + const result = output.get() + + expect(result).to.have.lengthOf(0) + const content = JSON.parse(await fsp.readFile(fname, 'utf8')) as Record + expect(content).to.have.property('steps') + const fetchedTemplate = await client.getTemplate(id) + expect(fetchedTemplate).to.have.property('content').that.has.property('steps') + } finally { + await client.deleteTemplate(id).catch(() => {}) + } + }), + ) + }) +}) diff --git a/test/e2e/test-utils.ts b/test/e2e/test-utils.ts new file mode 100644 index 00000000..0cf3a88e --- /dev/null +++ b/test/e2e/test-utils.ts @@ -0,0 +1,70 @@ +import { exec } from 'node:child_process' +import fsp from 'node:fs/promises' +import path from 'node:path' +import process from 'node:process' +import { fileURLToPath } from 'node:url' +import { promisify } from 'node:util' +import { rimraf } from 'rimraf' +import 'dotenv/config' +import { Transloadit as TransloaditClient } from '../../src/Transloadit.ts' + +export const execAsync = promisify(exec) + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) +export const cliPath = path.resolve(__dirname, '../../src/cli.ts') + +export const tmpDir = '/tmp' + +if (!process.env.TRANSLOADIT_KEY || !process.env.TRANSLOADIT_SECRET) { + console.error( + 'Please provide environment variables TRANSLOADIT_KEY and TRANSLOADIT_SECRET to run tests', + ) + process.exit(1) +} + +export const authKey = process.env.TRANSLOADIT_KEY +export const authSecret = process.env.TRANSLOADIT_SECRET + +process.setMaxListeners(Number.POSITIVE_INFINITY) + +export function delay(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, 
ms)) +} + +export interface OutputEntry { + type: string + msg: unknown + json?: { id?: string; assembly_id?: string } & Record +} + +export function testCase(cb: (client: TransloaditClient) => Promise): () => Promise { + const cwd = process.cwd() + return async () => { + const dirname = path.join( + tmpDir, + `transloadit_test-${Date.now()}-${Math.floor(Math.random() * 10000)}`, + ) + const client = new TransloaditClient({ authKey, authSecret }) + try { + await fsp.mkdir(dirname) + process.chdir(dirname) + return await cb(client) + } finally { + process.chdir(cwd) + await rimraf(dirname) + } + } +} + +export function runCli( + args: string, + env: Record = {}, +): Promise<{ stdout: string; stderr: string }> { + return execAsync(`npx tsx ${cliPath} ${args}`, { + env: { ...process.env, ...env }, + }) +} + +export function createClient(): TransloaditClient { + return new TransloaditClient({ authKey, authSecret }) +} diff --git a/test/unit/test-cli.test.ts b/test/unit/test-cli.test.ts index 9dd7f3d9..105473e0 100644 --- a/test/unit/test-cli.test.ts +++ b/test/unit/test-cli.test.ts @@ -3,11 +3,10 @@ import { tmpdir } from 'node:os' import path from 'node:path' import { fileURLToPath } from 'node:url' import { afterEach, describe, expect, it, vi } from 'vitest' -import * as cli from '../../src/cli.ts' +import { runSig, runSmartSig } from '../../src/cli/commands/auth.ts' +import { main, shouldRunCli } from '../../src/cli.ts' import { Transloadit } from '../../src/Transloadit.ts' -const { main, runSig, runSmartSig, shouldRunCli } = cli - const resetExitCode = () => { process.exitCode = undefined } @@ -358,17 +357,13 @@ describe('cli sig', () => { }) describe('cli help', () => { - it('prints usage when no command is provided', async () => { + it('prints usage when --help is provided', async () => { const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) - const stderrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) - await main([]) + 
await main(['--help']) - expect(stderrSpy).not.toHaveBeenCalled() expect(stdoutSpy).toHaveBeenCalled() - const message = `${stdoutSpy.mock.calls[0]?.[0]}` - expect(message).toContain('npx transloadit smart_sig') - expect(message).toContain('npx transloadit sig') - expect(process.exitCode).toBe(1) + const message = stdoutSpy.mock.calls.map((call) => `${call[0]}`).join('') + expect(message).toContain('Transloadit CLI') }) }) diff --git a/vitest.config.ts b/vitest.config.ts index 7c34317f..28e30ee7 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -4,7 +4,7 @@ export default defineConfig({ test: { coverage: { include: ['src/**/*.ts'], - exclude: ['**/*.d.ts', '**/*.test.ts', '**/test/**', '**/alphalib/**'], + exclude: ['**/*.d.ts', '**/*.test.ts', '**/test/**', '**/alphalib/**', '**/cli/**'], reporter: ['json', 'lcov', 'text', 'clover', 'json-summary', 'html'], provider: 'v8', thresholds: { @@ -17,5 +17,7 @@ export default defineConfig({ }, }, globals: true, + testTimeout: 100000, + exclude: ['test/e2e/test-utils.ts', 'test/e2e/OutputCtl.ts'], }, }) diff --git a/yarn.lock b/yarn.lock index f42faea1..f9363fa6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1012,6 +1012,22 @@ __metadata: languageName: node linkType: hard +"@isaacs/balanced-match@npm:^4.0.1": + version: 4.0.1 + resolution: "@isaacs/balanced-match@npm:4.0.1" + checksum: 10c0/7da011805b259ec5c955f01cee903da72ad97c5e6f01ca96197267d3f33103d5b2f8a1af192140f3aa64526c593c8d098ae366c2b11f7f17645d12387c2fd420 + languageName: node + linkType: hard + +"@isaacs/brace-expansion@npm:^5.0.0": + version: 5.0.0 + resolution: "@isaacs/brace-expansion@npm:5.0.0" + dependencies: + "@isaacs/balanced-match": "npm:^4.0.1" + checksum: 10c0/b4d4812f4be53afc2c5b6c545001ff7a4659af68d4484804e9d514e183d20269bb81def8682c01a22b17c4d6aed14292c8494f7d2ac664e547101c1a905aa977 + languageName: node + linkType: hard + "@isaacs/cliui@npm:^8.0.2": version: 8.0.2 resolution: "@isaacs/cliui@npm:8.0.2" @@ -1991,6 +2007,15 @@ __metadata: 
languageName: node linkType: hard +"@types/recursive-readdir@npm:^2.2.4": + version: 2.2.4 + resolution: "@types/recursive-readdir@npm:2.2.4" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/089f1a66595587e62cd6464748d34bfc607e7122dc36d9747e0e8c6a690d4319d9c1427e3c8f60c0ab820ce035f4c8623887f8e5f6075582364d7a3c0cb1d004 + languageName: node + linkType: hard + "@types/temp@npm:^0.9.4": version: 0.9.4 resolution: "@types/temp@npm:0.9.4" @@ -2432,6 +2457,17 @@ __metadata: languageName: node linkType: hard +"clipanion@npm:^4.0.0-rc.4": + version: 4.0.0-rc.4 + resolution: "clipanion@npm:4.0.0-rc.4" + dependencies: + typanion: "npm:^3.8.0" + peerDependencies: + typanion: "*" + checksum: 10c0/047b415b59a5e9777d00690fba563ccc850eca6bf27790a88d1deea3ecc8a89840ae9aed554ff284cc698a9f3f20256e43c25ff4a7c4c90a71e5e7d9dca61dd1 + languageName: node + linkType: hard + "color-convert@npm:^0.5.2": version: 0.5.3 resolution: "color-convert@npm:0.5.3" @@ -2643,10 +2679,10 @@ __metadata: languageName: node linkType: hard -"dotenv@npm:^17.2.2": - version: 17.2.2 - resolution: "dotenv@npm:17.2.2" - checksum: 10c0/be66513504590aff6eccb14167625aed9bd42ce80547f4fe5d195860211971a7060949b57108dfaeaf90658f79e40edccd3f233f0a978bff507b5b1565ae162b +"dotenv@npm:^17.2.3": + version: 17.2.3 + resolution: "dotenv@npm:17.2.3" + checksum: 10c0/c884403209f713214a1b64d4d1defa4934c2aa5b0002f5a670ae298a51e3c3ad3ba79dfee2f8df49f01ae74290fcd9acdb1ab1d09c7bfb42b539036108bb2ba0 languageName: node linkType: hard @@ -3173,6 +3209,17 @@ __metadata: languageName: node linkType: hard +"glob@npm:^13.0.0": + version: 13.0.0 + resolution: "glob@npm:13.0.0" + dependencies: + minimatch: "npm:^10.1.1" + minipass: "npm:^7.1.2" + path-scurry: "npm:^2.0.0" + checksum: 10c0/8e2f5821f3f7c312dd102e23a15b80c79e0837a9872784293ba2e15ec73b3f3749a49a42a31bfcb4e52c84820a474e92331c2eebf18819d20308f5c33876630a + languageName: node + linkType: hard + "glob@npm:^7.1.3": version: 7.2.3 resolution: "glob@npm:7.2.3" @@ -3361,6 
+3408,15 @@ __metadata: languageName: node linkType: hard +"image-size@npm:^2.0.2": + version: 2.0.2 + resolution: "image-size@npm:2.0.2" + bin: + image-size: bin/image-size.js + checksum: 10c0/f09dd0f7cf8511cd20e4f756bdb5a7cb6d2240de3323f41bde266bed8373392a293892bf12e907e2995f52833fd88dd27cf6b1a52ab93968afc716cb78cd7b79 + languageName: node + linkType: hard + "imurmurhash@npm:^0.1.4": version: 0.1.4 resolution: "imurmurhash@npm:0.1.4" @@ -3894,6 +3950,13 @@ __metadata: languageName: node linkType: hard +"lru-cache@npm:^11.0.0": + version: 11.2.4 + resolution: "lru-cache@npm:11.2.4" + checksum: 10c0/4a24f9b17537619f9144d7b8e42cd5a225efdfd7076ebe7b5e7dc02b860a818455201e67fbf000765233fe7e339d3c8229fc815e9b58ee6ede511e07608c19b2 + languageName: node + linkType: hard + "magic-string@npm:^0.30.17": version: 0.30.17 resolution: "magic-string@npm:0.30.17" @@ -3986,7 +4049,16 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:^3.0.4, minimatch@npm:^3.1.1": +"minimatch@npm:^10.1.1": + version: 10.1.1 + resolution: "minimatch@npm:10.1.1" + dependencies: + "@isaacs/brace-expansion": "npm:^5.0.0" + checksum: 10c0/c85d44821c71973d636091fddbfbffe62370f5ee3caf0241c5b60c18cd289e916200acb2361b7e987558cd06896d153e25d505db9fc1e43e6b4b6752e2702902 + languageName: node + linkType: hard + +"minimatch@npm:^3.0.4, minimatch@npm:^3.0.5, minimatch@npm:^3.1.1": version: 3.1.2 resolution: "minimatch@npm:3.1.2" dependencies: @@ -4168,6 +4240,13 @@ __metadata: languageName: node linkType: hard +"node-watch@npm:^0.7.4": + version: 0.7.4 + resolution: "node-watch@npm:0.7.4" + checksum: 10c0/05c3e66e7b5013d64c31a6dd96b55d87c14c8c0515d05d73554d706a1f8b962fe31781dce74740db29c0ec7c9a1f33a6bac07ef1e8aecc0d38c5ab4eef4c7ac0 + languageName: node + linkType: hard + "nopt@npm:^8.0.0": version: 8.1.0 resolution: "nopt@npm:8.1.0" @@ -4307,7 +4386,7 @@ __metadata: languageName: node linkType: hard -"package-json-from-dist@npm:^1.0.0": +"package-json-from-dist@npm:^1.0.0, 
package-json-from-dist@npm:^1.0.1": version: 1.0.1 resolution: "package-json-from-dist@npm:1.0.1" checksum: 10c0/62ba2785eb655fec084a257af34dbe24292ab74516d6aecef97ef72d4897310bc6898f6c85b5cd22770eaa1ce60d55a0230e150fb6a966e3ecd6c511e23d164b @@ -4376,6 +4455,16 @@ __metadata: languageName: node linkType: hard +"path-scurry@npm:^2.0.0": + version: 2.0.1 + resolution: "path-scurry@npm:2.0.1" + dependencies: + lru-cache: "npm:^11.0.0" + minipass: "npm:^7.1.2" + checksum: 10c0/2a16ed0e81fbc43513e245aa5763354e25e787dab0d539581a6c3f0f967461a159ed6236b2559de23aa5b88e7dc32b469b6c47568833dd142a4b24b4f5cd2620 + languageName: node + linkType: hard + "path-type@npm:^3.0.0": version: 3.0.0 resolution: "path-type@npm:3.0.0" @@ -4516,6 +4605,15 @@ __metadata: languageName: node linkType: hard +"recursive-readdir@npm:^2.2.3": + version: 2.2.3 + resolution: "recursive-readdir@npm:2.2.3" + dependencies: + minimatch: "npm:^3.0.5" + checksum: 10c0/d0238f137b03af9cd645e1e0b40ae78b6cda13846e3ca57f626fcb58a66c79ae018a10e926b13b3a460f1285acc946a4e512ea8daa2e35df4b76a105709930d1 + languageName: node + linkType: hard + "reflect.getprototypeof@npm:^1.0.6, reflect.getprototypeof@npm:^1.0.9": version: 1.0.10 resolution: "reflect.getprototypeof@npm:1.0.10" @@ -4609,6 +4707,18 @@ __metadata: languageName: node linkType: hard +"rimraf@npm:^6.1.2": + version: 6.1.2 + resolution: "rimraf@npm:6.1.2" + dependencies: + glob: "npm:^13.0.0" + package-json-from-dist: "npm:^1.0.1" + bin: + rimraf: dist/esm/bin.mjs + checksum: 10c0/c11a6a6fad937ada03c12fe688860690df8296d7cd08dbe59e3cc087f44e43573ae26ecbe48e54cb7a6db745b8c81fe5a15b9359233cc21d52d9b5b3330fcc74 + languageName: node + linkType: hard + "rimraf@npm:~2.6.2": version: 2.6.3 resolution: "rimraf@npm:2.6.3" @@ -5242,20 +5352,26 @@ __metadata: "@biomejs/biome": "npm:^2.2.4" "@transloadit/sev-logger": "npm:^0.0.15" "@types/debug": "npm:^4.1.12" + "@types/recursive-readdir": "npm:^2.2.4" "@types/temp": "npm:^0.9.4" "@vitest/coverage-v8": "npm:^3.2.4" 
badge-maker: "npm:^5.0.2" + clipanion: "npm:^4.0.0-rc.4" debug: "npm:^4.4.3" - dotenv: "npm:^17.2.2" + dotenv: "npm:^17.2.3" execa: "npm:9.6.0" form-data: "npm:^4.0.4" got: "npm:14.4.9" + image-size: "npm:^2.0.2" into-stream: "npm:^9.0.0" is-stream: "npm:^4.0.1" nock: "npm:^14.0.10" + node-watch: "npm:^0.7.4" npm-run-all: "npm:^4.1.5" p-map: "npm:^7.0.3" p-retry: "npm:^7.0.0" + recursive-readdir: "npm:^2.2.3" + rimraf: "npm:^6.1.2" temp: "npm:^0.9.4" tsx: "npm:4.20.5" tus-js-client: "npm:^4.3.1" @@ -5306,6 +5422,13 @@ __metadata: languageName: node linkType: hard +"typanion@npm:^3.8.0": + version: 3.14.0 + resolution: "typanion@npm:3.14.0" + checksum: 10c0/8b03b19844e6955bfd906c31dc781bae6d7f1fb3ce4fe24b7501557013d4889ae5cefe671dafe98d87ead0adceb8afcb8bc16df7dc0bd2b7331bac96f3a7cae2 + languageName: node + linkType: hard + "type-fest@npm:^4.26.1, type-fest@npm:^4.41.0": version: 4.41.0 resolution: "type-fest@npm:4.41.0" From 52952731c6bb2f497582a693d956f699c74c75aa Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 11:20:25 +0100 Subject: [PATCH 02/45] use cli subdir. 
more re-use of code --- .../workflows/{integration.yml => e2e.yml} | 6 +- CONTRIBUTING.md | 8 +- MIGRATION.md | 38 +-- package.json | 5 +- src/cli/JobsPromise.ts | 49 +++- src/cli/assemblies-create.ts | 8 +- src/cli/assemblies.ts | 10 +- src/cli/commands/auth.ts | 271 +++++++----------- src/cli/helpers.ts | 31 +- src/cli/templates.ts | 18 +- test/e2e/{ => cli}/OutputCtl.ts | 2 +- test/e2e/{ => cli}/assemblies.test.ts | 6 +- test/e2e/{ => cli}/bills.test.ts | 2 +- test/e2e/{ => cli}/cli.test.ts | 0 test/e2e/{ => cli}/notifications.test.ts | 2 +- test/e2e/{ => cli}/templates.test.ts | 8 +- test/e2e/{ => cli}/test-utils.ts | 4 +- .../fixtures/zerobytes.jpg | 0 test/{integration => e2e}/live-api.test.ts | 0 test/unit/{ => cli}/test-cli.test.ts | 8 +- vitest.config.ts | 2 +- 21 files changed, 189 insertions(+), 289 deletions(-) rename .github/workflows/{integration.yml => e2e.yml} (95%) rename test/e2e/{ => cli}/OutputCtl.ts (94%) rename test/e2e/{ => cli}/assemblies.test.ts (98%) rename test/e2e/{ => cli}/bills.test.ts (93%) rename test/e2e/{ => cli}/cli.test.ts (100%) rename test/e2e/{ => cli}/notifications.test.ts (90%) rename test/e2e/{ => cli}/templates.test.ts (97%) rename test/e2e/{ => cli}/test-utils.ts (92%) rename test/{integration => e2e}/fixtures/zerobytes.jpg (100%) rename test/{integration => e2e}/live-api.test.ts (100%) rename test/unit/{ => cli}/test-cli.test.ts (97%) diff --git a/.github/workflows/integration.yml b/.github/workflows/e2e.yml similarity index 95% rename from .github/workflows/integration.yml rename to .github/workflows/e2e.yml index 6aee7c8f..7e2d220f 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/e2e.yml @@ -1,4 +1,4 @@ -name: Integration tests +name: e2e tests on: workflow_dispatch: @@ -12,7 +12,7 @@ jobs: test: runs-on: ubuntu-latest strategy: - # Integration tests are not yet ready to run in parallel + # e2e tests are not yet ready to run in parallel max-parallel: 1 matrix: node: @@ -33,7 +33,7 @@ jobs: # - 
name: Setup tmate session # uses: mxschmitt/action-tmate@v3 - - run: corepack yarn test:all + - run: corepack yarn test env: TRANSLOADIT_KEY: ${{ secrets.TRANSLOADIT_KEY }} TRANSLOADIT_SECRET: ${{ secrets.TRANSLOADIT_SECRET }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9eda24b6..d0e048f5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,9 +50,9 @@ yarn test:unit This will also generate a coverage report in the `coverage` directory. -### Integration tests +### e2e tests -Integration tests are in the [`test/integration`](test/integration) folder. They require some extra setup. +e2e tests are in the [`test/e2e`](test/e2e) folder. They require some extra setup. Firstly, these tests require the Cloudflare executable. You can download this with: @@ -63,10 +63,10 @@ chmod +x cloudflared-linux-amd64 They also require a Transloadit key and secret, which you can get from https://transloadit.com/c/credentials. -You can run the integration tests with: +You can run the e2e tests with: ```sh -TRANSLOADIT_KEY='YOUR_TRANSLOADIT_KEY' TRANSLOADIT_SECRET='YOUR_TRANSLOADIT_SECRET' CLOUDFLARED_PATH='./cloudflared-linux-amd64' yarn test:integration +TRANSLOADIT_KEY='YOUR_TRANSLOADIT_KEY' TRANSLOADIT_SECRET='YOUR_TRANSLOADIT_SECRET' CLOUDFLARED_PATH='./cloudflared-linux-amd64' yarn test:e2e ``` ### Code Coverage diff --git a/MIGRATION.md b/MIGRATION.md index 83b5528b..50ebcf7a 100644 --- a/MIGRATION.md +++ b/MIGRATION.md @@ -19,11 +19,11 @@ Version 4 focuses on type-safety, clearer errors, and modern Node support. Most ```js // CommonJS import example async function getClient() { - const { Transloadit } = await import("transloadit"); + const { Transloadit } = await import('transloadit') return new Transloadit({ - authKey: process.env.TRANSLOADIT_KEY ?? "", - authSecret: process.env.TRANSLOADIT_SECRET ?? "", - }); + authKey: process.env.TRANSLOADIT_KEY ?? '', + authSecret: process.env.TRANSLOADIT_SECRET ?? 
'', + }) } ``` @@ -48,16 +48,16 @@ The package also exports `AssemblyInstructionsInput`, `AssemblyIndexItem`, `Asse const params: AssemblyInstructionsInput = { steps: { resize: { - use: ":original", - robot: "/image/resize", + use: ':original', + robot: '/image/resize', width: 320, height: 240, result: true, }, }, -}; +} -await transloadit.createAssembly({ params, waitForCompletion: true }); +await transloadit.createAssembly({ params, waitForCompletion: true }) ``` ## 3. Adjust API result handling @@ -82,15 +82,15 @@ const createdAssembly = await transloadit.createAssembly(...); ```ts try { - await transloadit.createAssembly({ params }); + await transloadit.createAssembly({ params }) } catch (error) { if (error instanceof ApiError && error.response.assembly_id) { console.error( - "Troubleshoot at https://transloadit.com/c/assemblies/" + + 'Troubleshoot at https://transloadit.com/c/assemblies/' + error.response.assembly_id - ); + ) } - throw error; + throw error } ``` @@ -103,18 +103,18 @@ try { authKey, authSecret, validateResponses: true, - }); + }) ``` - `getSignedSmartCDNUrl` generates Smart CDN URLs with signatures that match the server-side implementation: ```ts const signedUrl = transloadit.getSignedSmartCDNUrl({ - workspace: "my-team", - template: "hero-image", - input: "landing.jpg", - urlParams: { format: "webp" }, - }); + workspace: 'my-team', + template: 'hero-image', + input: 'landing.jpg', + urlParams: { format: 'webp' }, + }) ``` ## 6. Removed `createAssembly` callback support @@ -135,7 +135,7 @@ As a consequence of upgrading `got` to v14, the `gotRetry` option no longer acce ## Testing & troubleshooting -- Run your existing integration tests on Node 20+. If you relied on CommonJS `require`, convert those modules or wrap calls in `import()` shims as shown above. +- Run your existing e2e tests on Node 20+. If you relied on CommonJS `require`, convert those modules or wrap calls in `import()` shims as shown above. 
- If TypeScript raises errors about unfamiliar properties, import the respective types from `transloadit` instead of redefining them. - Schemas intentionally mirror the current public API. Some properties remain permissive while we tighten validation in the API itself; report gaps if the SDK raises or misses invalid data. diff --git a/package.json b/package.json index fd0911ab..28434e1c 100644 --- a/package.json +++ b/package.json @@ -67,13 +67,10 @@ "lint:js": "biome check .", "lint": "npm-run-all --parallel 'lint:js'", "fix": "npm-run-all --serial 'fix:js'", - "next:update": "next-update --keep true --tldr", "prepack": "rm -f tsconfig.tsbuildinfo tsconfig.build.tsbuildinfo && tsc --build tsconfig.build.json", "test:unit": "vitest run --coverage ./test/unit", - "test:integration": "vitest run ./test/integration", "test:e2e": "vitest run ./test/e2e", - "test:all": "vitest run --coverage", - "test": "yarn test:unit" + "test": "vitest run --coverage" }, "license": "MIT", "main": "./dist/Transloadit.js", diff --git a/src/cli/JobsPromise.ts b/src/cli/JobsPromise.ts index 941db468..4cc582b2 100644 --- a/src/cli/JobsPromise.ts +++ b/src/cli/JobsPromise.ts @@ -1,27 +1,46 @@ -import { EventEmitter } from 'node:events' +/** + * Tracks a collection of promises and emits errors as they occur. + * Used to run multiple async operations in parallel while: + * 1. Reporting errors as they happen (via onError callback) + * 2. Waiting for all operations to complete at the end + */ +export default class JobsPromise { + private promises: Set> = new Set() + private onError: ((err: unknown) => void) | null = null -export default class JobsPromise extends EventEmitter { - private promises: Set> - - constructor() { - super() - this.promises = new Set() + /** + * Set the error handler for individual promise rejections. + * Errors are reported immediately when promises reject. 
+ */ + setErrorHandler(handler: (err: unknown) => void): void { + this.onError = handler } + /** + * Add a promise to track. If the promise rejects, + * the error handler will be called. + */ add(promise: Promise): void { this.promises.add(promise) promise - .finally(() => this.promises.delete(promise)) .catch((err: unknown) => { - this.emit('error', err) + this.onError?.(err) + }) + .finally(() => { + this.promises.delete(promise) }) } - promise(): Promise { - const promises: Promise[] = [] - for (const promise of this.promises) { - promises.push(promise) - } - return Promise.all(promises) + /** + * Wait for all tracked promises to settle. + * Returns array of fulfilled values (rejects are filtered out + * since errors were already handled via the error handler). + */ + async allSettled(): Promise { + const promises = [...this.promises] + const results = await Promise.allSettled(promises) + return results + .filter((r): r is PromiseFulfilledResult => r.status === 'fulfilled') + .map((r) => r.value) } } diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 87a6a5d8..ce00fc70 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -449,7 +449,7 @@ function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { const jobsPromise = new JobsPromise() - jobEmitter.on('end', () => jobsPromise.promise().then(() => emitter.emit('end'))) + jobEmitter.on('end', () => jobsPromise.allSettled().then(() => emitter.emit('end'))) jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) jobEmitter.on('job', (job: Job) => { if (job.in == null || job.out == null) { @@ -730,8 +730,8 @@ export default async function run( } }) - jobsPromise.on('error', (err: Error) => { - outputctl.error(err) + jobsPromise.setErrorHandler((err: unknown) => { + outputctl.error(err as Error) }) emitter.on('error', (err: Error) => { @@ -740,7 +740,7 @@ export default async function run( }) emitter.on('end', () => { - 
resolve(jobsPromise.promise()) + resolve(jobsPromise.allSettled()) }) }) } diff --git a/src/cli/assemblies.ts b/src/cli/assemblies.ts index a6d627d8..2330662b 100644 --- a/src/cli/assemblies.ts +++ b/src/cli/assemblies.ts @@ -2,7 +2,7 @@ import { z } from 'zod' import { tryCatch } from '../alphalib/tryCatch.ts' import type { Transloadit } from '../Transloadit.ts' import assembliesCreate from './assemblies-create.ts' -import { createReadStream, formatAPIError, stream2buf } from './helpers.ts' +import { createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' import type { IOutputCtl } from './OutputCtl.ts' import { ensureError } from './types.ts' @@ -112,13 +112,7 @@ export async function replay( ): Promise { if (steps) { try { - const buf = await new Promise((resolve, reject) => { - stream2buf(createReadStream(steps), (err, buf) => { - if (err) reject(err) - else if (buf) resolve(buf) - else reject(new Error('No buffer received')) - }) - }) + const buf = await streamToBuffer(createReadStream(steps)) const parsed: unknown = JSON.parse(buf.toString()) const validated = StepsSchema.safeParse(parsed) if (!validated.success) { diff --git a/src/cli/commands/auth.ts b/src/cli/commands/auth.ts index 30401e09..f7ddd8e5 100644 --- a/src/cli/commands/auth.ts +++ b/src/cli/commands/auth.ts @@ -77,7 +77,7 @@ async function readStdin(): Promise { return data } -function ensureCredentials(): { authKey: string; authSecret: string } | null { +function getCredentials(): { authKey: string; authSecret: string } | null { const authKey = process.env.TRANSLOADIT_KEY || process.env.TRANSLOADIT_AUTH_KEY const authSecret = process.env.TRANSLOADIT_SECRET || process.env.TRANSLOADIT_AUTH_SECRET @@ -88,34 +88,16 @@ function ensureCredentials(): { authKey: string; authSecret: string } | null { return { authKey, authSecret } } -// Testable helper functions exported for unit tests -export interface RunSigOptions { - providedInput?: string - algorithm?: string -} +// Result type for 
signature operations +type SigResult = { ok: true; output: string } | { ok: false; error: string } -export interface RunSmartSigOptions { - providedInput?: string -} - -function fail(message: string): void { - console.error(message) - process.exitCode = 1 -} - -export async function runSig(options: RunSigOptions = {}): Promise { - const credentials = ensureCredentials() - if (credentials == null) { - fail( - 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', - ) - return - } +// Core logic for signature generation +function generateSignature( + input: string, + credentials: { authKey: string; authSecret: string }, + algorithm?: string, +): SigResult { const { authKey, authSecret } = credentials - const { providedInput, algorithm } = options - - const rawInput = providedInput ?? (await readStdin()) - const input = rawInput.trim() let params: Record if (input === '') { @@ -125,19 +107,16 @@ export async function runSig(options: RunSigOptions = {}): Promise { try { parsed = JSON.parse(input) } catch (error) { - fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return + return { ok: false, error: `Failed to parse JSON from stdin: ${(error as Error).message}` } } if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - fail('Invalid params provided via stdin. Expected a JSON object.') - return + return { ok: false, error: 'Invalid params provided via stdin. Expected a JSON object.' 
} } const parsedResult = cliSignatureParamsSchema.safeParse(parsed) if (!parsedResult.success) { - fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return + return { ok: false, error: `Invalid params: ${formatIssues(parsedResult.error.issues)}` } } const parsedParams = parsedResult.data as Record @@ -160,48 +139,41 @@ export async function runSig(options: RunSigOptions = {}): Promise { const client = new Transloadit({ authKey, authSecret }) try { const signature = client.calcSignature(params as OptionalAuthParams, algorithm) - process.stdout.write(`${JSON.stringify(signature)}\n`) + return { ok: true, output: JSON.stringify(signature) } } catch (error) { - fail(`Failed to generate signature: ${(error as Error).message}`) + return { ok: false, error: `Failed to generate signature: ${(error as Error).message}` } } } -export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { - const credentials = ensureCredentials() - if (credentials == null) { - fail( - 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', - ) - return - } +// Core logic for Smart CDN URL generation +function generateSmartCdnUrl( + input: string, + credentials: { authKey: string; authSecret: string }, +): SigResult { const { authKey, authSecret } = credentials - const rawInput = options.providedInput ?? (await readStdin()) - const input = rawInput.trim() if (input === '') { - fail( - 'Missing params provided via stdin. Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', - ) - return + return { + ok: false, + error: + 'Missing params provided via stdin. 
Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', + } } let parsed: unknown try { parsed = JSON.parse(input) } catch (error) { - fail(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return + return { ok: false, error: `Failed to parse JSON from stdin: ${(error as Error).message}` } } if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - fail('Invalid params provided via stdin. Expected a JSON object.') - return + return { ok: false, error: 'Invalid params provided via stdin. Expected a JSON object.' } } const parsedResult = smartCdnParamsSchema.safeParse(parsed) if (!parsedResult.success) { - fail(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return + return { ok: false, error: `Invalid params: ${formatIssues(parsedResult.error.issues)}` } } const { workspace, template, input: inputFieldRaw, url_params, expire_at_ms } = parsedResult.data @@ -211,8 +183,7 @@ export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { + const credentials = getCredentials() + if (credentials == null) { + console.error( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + process.exitCode = 1 + return + } + + const rawInput = options.providedInput ?? (await readStdin()) + const result = generateSignature(rawInput.trim(), credentials, options.algorithm) + + if (result.ok) { + process.stdout.write(`${result.output}\n`) + } else { + console.error(result.error) + process.exitCode = 1 + } +} + +export async function runSmartSig(options: RunSmartSigOptions = {}): Promise { + const credentials = getCredentials() + if (credentials == null) { + console.error( + 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', + ) + process.exitCode = 1 + return + } + + const rawInput = options.providedInput ?? 
(await readStdin()) + const result = generateSmartCdnUrl(rawInput.trim(), credentials) + + if (result.ok) { + process.stdout.write(`${result.output}\n`) + } else { + console.error(result.error) + process.exitCode = 1 } } @@ -266,68 +289,24 @@ export class SignatureCommand extends UnauthenticatedCommand { }) protected async run(): Promise { - const credentials = ensureCredentials() + const credentials = getCredentials() if (credentials == null) { this.output.error( 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', ) return 1 } - const { authKey, authSecret } = credentials const rawInput = await readStdin() - const input = rawInput.trim() - let params: Record - - if (input === '') { - params = { auth: { key: authKey } } - } else { - let parsed: unknown - try { - parsed = JSON.parse(input) - } catch (error) { - this.output.error(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return 1 - } - - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - this.output.error('Invalid params provided via stdin. Expected a JSON object.') - return 1 - } - - const parsedResult = cliSignatureParamsSchema.safeParse(parsed) - if (!parsedResult.success) { - this.output.error(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return 1 - } - - const parsedParams = parsedResult.data as Record - const existingAuth = - typeof parsedParams.auth === 'object' && - parsedParams.auth != null && - !Array.isArray(parsedParams.auth) - ? 
(parsedParams.auth as Record) - : {} - - params = { - ...parsedParams, - auth: { - ...existingAuth, - key: authKey, - }, - } - } + const result = generateSignature(rawInput.trim(), credentials, this.algorithm) - const client = new Transloadit({ authKey, authSecret }) - try { - const signature = client.calcSignature(params as OptionalAuthParams, this.algorithm) - process.stdout.write(`${JSON.stringify(signature)}\n`) - } catch (error) { - this.output.error(`Failed to generate signature: ${(error as Error).message}`) - return 1 + if (result.ok) { + process.stdout.write(`${result.output}\n`) + return undefined } - return undefined + this.output.error(result.error) + return 1 } } @@ -363,81 +342,23 @@ export class SmartCdnSignatureCommand extends UnauthenticatedCommand { }) protected async run(): Promise { - const credentials = ensureCredentials() + const credentials = getCredentials() if (credentials == null) { this.output.error( 'Missing credentials. Please set TRANSLOADIT_KEY and TRANSLOADIT_SECRET environment variables.', ) return 1 } - const { authKey, authSecret } = credentials const rawInput = await readStdin() - const input = rawInput.trim() - if (input === '') { - this.output.error( - 'Missing params provided via stdin. Expected a JSON object with workspace, template, input, and optional Smart CDN parameters.', - ) - return 1 - } - - let parsed: unknown - try { - parsed = JSON.parse(input) - } catch (error) { - this.output.error(`Failed to parse JSON from stdin: ${(error as Error).message}`) - return 1 - } - - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - this.output.error('Invalid params provided via stdin. 
Expected a JSON object.') - return 1 - } + const result = generateSmartCdnUrl(rawInput.trim(), credentials) - const parsedResult = smartCdnParamsSchema.safeParse(parsed) - if (!parsedResult.success) { - this.output.error(`Invalid params: ${formatIssues(parsedResult.error.issues)}`) - return 1 - } - - const { - workspace, - template, - input: inputFieldRaw, - url_params, - expire_at_ms, - } = parsedResult.data - const urlParams = normalizeUrlParams(url_params as Record | undefined) - - let expiresAt: number | undefined - if (typeof expire_at_ms === 'string') { - const parsedNumber = Number.parseInt(expire_at_ms, 10) - if (Number.isNaN(parsedNumber)) { - this.output.error('Invalid params: expire_at_ms must be a number.') - return 1 - } - expiresAt = parsedNumber - } else { - expiresAt = expire_at_ms - } - - const inputField = typeof inputFieldRaw === 'string' ? inputFieldRaw : String(inputFieldRaw) - - const client = new Transloadit({ authKey, authSecret }) - try { - const signedUrl = client.getSignedSmartCDNUrl({ - workspace, - template, - input: inputField, - urlParams, - expiresAt, - }) - process.stdout.write(`${signedUrl}\n`) - } catch (error) { - this.output.error(`Failed to generate Smart CDN URL: ${(error as Error).message}`) - return 1 + if (result.ok) { + process.stdout.write(`${result.output}\n`) + return undefined } - return undefined + this.output.error(result.error) + return 1 } } diff --git a/src/cli/helpers.ts b/src/cli/helpers.ts index fd09641b..ef65b9f9 100644 --- a/src/cli/helpers.ts +++ b/src/cli/helpers.ts @@ -8,31 +8,12 @@ export function createReadStream(file: string): Readable { return fs.createReadStream(file) } -export function stream2buf(stream: Readable, cb: (err: Error | null, buf?: Buffer) => void): void { - let size = 0 - const bufs: Buffer[] = [] - - stream.on('error', cb) - - stream.on('readable', () => { - const chunk = stream.read() as Buffer | null - if (chunk === null) return - - size += chunk.length - bufs.push(chunk) - }) - - 
stream.on('end', () => { - const buf = Buffer.alloc(size) - let offset = 0 - - for (const b of bufs) { - b.copy(buf, offset) - offset += b.length - } - - cb(null, buf) - }) +export async function streamToBuffer(stream: Readable): Promise { + const chunks: Buffer[] = [] + for await (const chunk of stream) { + chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)) + } + return Buffer.concat(chunks) } export function formatAPIError(err: unknown): string { diff --git a/src/cli/templates.ts b/src/cli/templates.ts index db8eadfc..78de1048 100644 --- a/src/cli/templates.ts +++ b/src/cli/templates.ts @@ -6,7 +6,7 @@ import { z } from 'zod' import { tryCatch } from '../alphalib/tryCatch.ts' import type { TemplateContent } from '../apiTypes.ts' import type { Transloadit } from '../Transloadit.ts' -import { createReadStream, formatAPIError, stream2buf } from './helpers.ts' +import { createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' import type { IOutputCtl } from './OutputCtl.ts' import ModifiedLookup from './template-last-modified.ts' import type { TemplateFile } from './types.ts' @@ -54,13 +54,7 @@ export async function create( { name, file }: TemplateCreateOptions, ): Promise { try { - const buf = await new Promise((resolve, reject) => { - stream2buf(createReadStream(file), (err, buf) => { - if (err) reject(err) - else if (buf) resolve(buf) - else reject(new Error('No buffer received')) - }) - }) + const buf = await streamToBuffer(createReadStream(file)) const parsed: unknown = JSON.parse(buf.toString()) const validated = StepsSchema.safeParse(parsed) @@ -105,13 +99,7 @@ export async function modify( { template, name, file }: TemplateModifyOptions, ): Promise { try { - const buf = await new Promise((resolve, reject) => { - stream2buf(createReadStream(file), (err, buf) => { - if (err) reject(err) - else if (buf) resolve(buf) - else reject(new Error('No buffer received')) - }) - }) + const buf = await streamToBuffer(createReadStream(file)) let 
json: Record | null = null let newName = name diff --git a/test/e2e/OutputCtl.ts b/test/e2e/cli/OutputCtl.ts similarity index 94% rename from test/e2e/OutputCtl.ts rename to test/e2e/cli/OutputCtl.ts index 0fefa189..d63cbde5 100644 --- a/test/e2e/OutputCtl.ts +++ b/test/e2e/cli/OutputCtl.ts @@ -1,4 +1,4 @@ -import type { OutputCtlOptions } from '../../src/cli/OutputCtl.ts' +import type { OutputCtlOptions } from '../../../src/cli/OutputCtl.ts' interface OutputEntry { type: 'error' | 'warn' | 'info' | 'debug' | 'print' diff --git a/test/e2e/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts similarity index 98% rename from test/e2e/assemblies.test.ts rename to test/e2e/cli/assemblies.test.ts index b988dbba..16c8d4c4 100644 --- a/test/e2e/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -4,9 +4,9 @@ import { promisify } from 'node:util' import { imageSize } from 'image-size' import rreaddir from 'recursive-readdir' import { describe, expect, it } from 'vitest' -import * as assemblies from '../../src/cli/assemblies.ts' -import assembliesCreate from '../../src/cli/assemblies-create.ts' -import { zip } from '../../src/cli/helpers.ts' +import * as assemblies from '../../../src/cli/assemblies.ts' +import assembliesCreate from '../../../src/cli/assemblies-create.ts' +import { zip } from '../../../src/cli/helpers.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } from './test-utils.ts' diff --git a/test/e2e/bills.test.ts b/test/e2e/cli/bills.test.ts similarity index 93% rename from test/e2e/bills.test.ts rename to test/e2e/cli/bills.test.ts index a4cc597d..970d4634 100644 --- a/test/e2e/bills.test.ts +++ b/test/e2e/cli/bills.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from 'vitest' -import * as bills from '../../src/cli/bills.ts' +import * as bills from '../../../src/cli/bills.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } 
from './test-utils.ts' diff --git a/test/e2e/cli.test.ts b/test/e2e/cli/cli.test.ts similarity index 100% rename from test/e2e/cli.test.ts rename to test/e2e/cli/cli.test.ts diff --git a/test/e2e/notifications.test.ts b/test/e2e/cli/notifications.test.ts similarity index 90% rename from test/e2e/notifications.test.ts rename to test/e2e/cli/notifications.test.ts index 9a550dfc..b7d060b9 100644 --- a/test/e2e/notifications.test.ts +++ b/test/e2e/cli/notifications.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from 'vitest' -import * as notifications from '../../src/cli/notifications.ts' +import * as notifications from '../../../src/cli/notifications.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } from './test-utils.ts' diff --git a/test/e2e/templates.test.ts b/test/e2e/cli/templates.test.ts similarity index 97% rename from test/e2e/templates.test.ts rename to test/e2e/cli/templates.test.ts index cd4a3566..70423ede 100644 --- a/test/e2e/templates.test.ts +++ b/test/e2e/cli/templates.test.ts @@ -1,10 +1,10 @@ import fsp from 'node:fs/promises' import path from 'node:path' import { afterAll, beforeAll, describe, expect, it } from 'vitest' -import type { TemplateContent } from '../../src/apiTypes.ts' -import { zip } from '../../src/cli/helpers.ts' -import * as templates from '../../src/cli/templates.ts' -import { Transloadit as TransloaditClient } from '../../src/Transloadit.ts' +import type { TemplateContent } from '../../../src/apiTypes.ts' +import { zip } from '../../../src/cli/helpers.ts' +import * as templates from '../../../src/cli/templates.ts' +import { Transloadit as TransloaditClient } from '../../../src/Transloadit.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { authKey, authSecret, delay, testCase } from './test-utils.ts' diff --git a/test/e2e/test-utils.ts b/test/e2e/cli/test-utils.ts similarity index 92% rename from 
test/e2e/test-utils.ts rename to test/e2e/cli/test-utils.ts index 0cf3a88e..799cd2e5 100644 --- a/test/e2e/test-utils.ts +++ b/test/e2e/cli/test-utils.ts @@ -6,12 +6,12 @@ import { fileURLToPath } from 'node:url' import { promisify } from 'node:util' import { rimraf } from 'rimraf' import 'dotenv/config' -import { Transloadit as TransloaditClient } from '../../src/Transloadit.ts' +import { Transloadit as TransloaditClient } from '../../../src/Transloadit.ts' export const execAsync = promisify(exec) const __dirname = path.dirname(fileURLToPath(import.meta.url)) -export const cliPath = path.resolve(__dirname, '../../src/cli.ts') +export const cliPath = path.resolve(__dirname, '../../../src/cli.ts') export const tmpDir = '/tmp' diff --git a/test/integration/fixtures/zerobytes.jpg b/test/e2e/fixtures/zerobytes.jpg similarity index 100% rename from test/integration/fixtures/zerobytes.jpg rename to test/e2e/fixtures/zerobytes.jpg diff --git a/test/integration/live-api.test.ts b/test/e2e/live-api.test.ts similarity index 100% rename from test/integration/live-api.test.ts rename to test/e2e/live-api.test.ts diff --git a/test/unit/test-cli.test.ts b/test/unit/cli/test-cli.test.ts similarity index 97% rename from test/unit/test-cli.test.ts rename to test/unit/cli/test-cli.test.ts index 105473e0..f630cb20 100644 --- a/test/unit/test-cli.test.ts +++ b/test/unit/cli/test-cli.test.ts @@ -3,9 +3,9 @@ import { tmpdir } from 'node:os' import path from 'node:path' import { fileURLToPath } from 'node:url' import { afterEach, describe, expect, it, vi } from 'vitest' -import { runSig, runSmartSig } from '../../src/cli/commands/auth.ts' -import { main, shouldRunCli } from '../../src/cli.ts' -import { Transloadit } from '../../src/Transloadit.ts' +import { runSig, runSmartSig } from '../../../src/cli/commands/auth.ts' +import { main, shouldRunCli } from '../../../src/cli.ts' +import { Transloadit } from '../../../src/Transloadit.ts' const resetExitCode = () => { process.exitCode = 
undefined @@ -20,7 +20,7 @@ afterEach(() => { describe('cli smart_sig', () => { it('recognizes symlinked invocation paths', () => { const tmpDir = mkdtempSync(path.join(tmpdir(), 'transloadit-cli-')) - const symlinkTarget = fileURLToPath(new URL('../../src/cli.ts', import.meta.url)) + const symlinkTarget = fileURLToPath(new URL('../../../src/cli.ts', import.meta.url)) const symlinkPath = path.join(tmpDir, 'transloadit') symlinkSync(symlinkTarget, symlinkPath) diff --git a/vitest.config.ts b/vitest.config.ts index 28e30ee7..d3b710d2 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -18,6 +18,6 @@ export default defineConfig({ }, globals: true, testTimeout: 100000, - exclude: ['test/e2e/test-utils.ts', 'test/e2e/OutputCtl.ts'], + exclude: ['test/e2e/cli/test-utils.ts', 'test/e2e/cli/OutputCtl.ts'], }, }) From bb641570f3f6d5ef8580477457bef3625ec5fbff Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 11:39:04 +0100 Subject: [PATCH 03/45] fix e2e --- test/e2e/cli/templates.test.ts | 5 +++-- test/e2e/live-api.test.ts | 7 +++++-- vitest.config.ts | 7 ++++++- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/test/e2e/cli/templates.test.ts b/test/e2e/cli/templates.test.ts index 70423ede..82b97a60 100644 --- a/test/e2e/cli/templates.test.ts +++ b/test/e2e/cli/templates.test.ts @@ -87,11 +87,12 @@ describe('templates', () => { describe('modify', () => { let templateId: string + const originalName = `original-name-${Date.now()}` beforeAll(async () => { const client = new TransloaditClient({ authKey, authSecret }) const response = await client.createTemplate({ - name: 'original-name', + name: originalName, template: { steps: { dummy: { robot: '/html/convert', url: 'https://example.com' } }, } as TemplateContent, @@ -114,7 +115,7 @@ describe('templates', () => { expect(result).to.have.lengthOf(0) await delay(2000) const template = await client.getTemplate(templateId) - 
expect(template).to.have.property('name').that.equals('original-name') + expect(template).to.have.property('name').that.equals(originalName) expect(template).to.have.property('content').that.has.property('steps') }), ) diff --git a/test/e2e/live-api.test.ts b/test/e2e/live-api.test.ts index 817d41d3..2becc79d 100644 --- a/test/e2e/live-api.test.ts +++ b/test/e2e/live-api.test.ts @@ -722,7 +722,10 @@ describe('API integration', { timeout: 60000, retry: 1 }, () => { }) it('should allow creating a template', async () => { - const template = await client.createTemplate({ name: templName, template: genericParams }) + const template = await client.createTemplate({ + name: templName, + template: { steps: genericParams.steps }, + }) templId = template.id }) @@ -732,7 +735,7 @@ describe('API integration', { timeout: 60000, retry: 1 }, () => { const template = await client.getTemplate(nn(templId, 'templId')) const { name, content } = template expect(name).toBe(templName) - expect(content).toEqual(genericParams) + expect(content).toEqual({ steps: genericParams.steps }) }) it('should allow editing a template', async () => { diff --git a/vitest.config.ts b/vitest.config.ts index d3b710d2..36c3dcae 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -18,6 +18,11 @@ export default defineConfig({ }, globals: true, testTimeout: 100000, - exclude: ['test/e2e/cli/test-utils.ts', 'test/e2e/cli/OutputCtl.ts'], + exclude: [ + '**/node_modules/**', + '**/dist/**', + 'test/e2e/cli/test-utils.ts', + 'test/e2e/cli/OutputCtl.ts', + ], }, }) From 690603aa9c4a6114506254dda02bff9c81518a57 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 12:51:53 +0100 Subject: [PATCH 04/45] apply stricter biome & tsconfig rules --- biome.json | 69 +++++++++++++++++------ examples/credentials.ts | 7 ++- examples/template_api.ts | 3 +- src/ApiError.ts | 3 +- src/Transloadit.ts | 66 +++++++++++----------- src/cli/assemblies-create.ts | 23 +++++++- src/cli/commands/auth.ts | 3 +- 
src/tus.ts | 3 +- test/e2e/cli/assemblies.test.ts | 6 +- test/e2e/live-api.test.ts | 17 +++--- test/testserver.ts | 9 +-- test/tunnel.ts | 3 +- test/unit/mock-http.test.ts | 34 ++++++----- test/unit/test-transloadit-client.test.ts | 3 +- test/unit/transloadit-advanced.test.ts | 12 ++-- test/unit/tus.test.ts | 8 +-- test/util.ts | 3 +- 17 files changed, 169 insertions(+), 103 deletions(-) diff --git a/biome.json b/biome.json index 85545952..aef24a02 100644 --- a/biome.json +++ b/biome.json @@ -1,11 +1,13 @@ { "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json", + "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true }, "files": { + "ignoreUnknown": false, "includes": [ "**", + "!package.json", "!coverage", "!dist", - "!node_modules", "!fixture", "!.vscode", "!src/alphalib" @@ -18,30 +20,43 @@ "indentWidth": 2, "lineEnding": "lf", "lineWidth": 100, - "attributePosition": "auto" + "attributePosition": "auto", + "bracketSameLine": false, + "bracketSpacing": true, + "expand": "auto", + "useEditorconfig": true, + "includes": ["**", "!**/lib/", "!**/node_modules/"] }, - "assist": { "actions": { "source": { "organizeImports": "on" } } }, "linter": { "enabled": true, "rules": { "recommended": true, "suspicious": { - "noExplicitAny": "warn", + "noExplicitAny": "error", "noImplicitAnyLet": "error", - "noConfusingVoidType": "warn", - "noAssignInExpressions": "off", - "noTemplateCurlyInString": "off" + "noConfusingVoidType": "error", + "noAssignInExpressions": "error", + "noArrayIndexKey": "error", + "noShadowRestrictedNames": "error", + "noExportsInTest": "error", + "noDuplicateTestHooks": "error", + "useIterableCallbackReturn": "error", + "noTemplateCurlyInString": "off", + "useAwait": "error" }, "correctness": { - "noInvalidUseBeforeDeclaration": "warn" + "noInvalidUseBeforeDeclaration": "error", + "noVoidTypeReturn": "error" }, "complexity": { - "useLiteralKeys": "off", - "noForEach": "warn" + "useLiteralKeys": "error", + "noForEach": "error" }, 
"style": { - "noParameterAssign": "warn", - "noUnusedTemplateLiteral": "off", + "noNonNullAssertion": "error", + "noNamespace": "error", + "noParameterAssign": "error", + "noUnusedTemplateLiteral": "error", "useAsConstAssertion": "error", "useDefaultParameterLast": "error", "useEnumInitializers": "error", @@ -49,7 +64,14 @@ "useSingleVarDeclarator": "error", "useNumberNamespace": "error", "noInferrableTypes": "error", - "noUselessElse": "error" + "noUselessElse": "error", + "useImportType": { + "level": "error", + "options": { + "style": "separatedType" + } + }, + "useNodejsImportProtocol": "error" } } }, @@ -60,10 +82,25 @@ "trailingCommas": "all", "semicolons": "asNeeded", "arrowParentheses": "always", - "bracketSpacing": true, "bracketSameLine": false, "quoteStyle": "single", - "attributePosition": "auto" + "attributePosition": "auto", + "bracketSpacing": true } - } + }, + "assist": { + "enabled": true, + "actions": { "source": { "organizeImports": "on" } } + }, + "overrides": [ + { + "includes": ["*.html"], + "javascript": { "formatter": { "quoteStyle": "double" } } + }, + { + "includes": ["*.scss", "*.css"], + "javascript": { "formatter": { "quoteStyle": "double" } }, + "formatter": { "lineWidth": 80 } + } + ] } diff --git a/examples/credentials.ts b/examples/credentials.ts index 397754a8..11db4829 100644 --- a/examples/credentials.ts +++ b/examples/credentials.ts @@ -6,7 +6,8 @@ // // yarn prepack // -import { type CreateTemplateCredentialParams, Transloadit } from 'transloadit' +import type { CreateTemplateCredentialParams } from 'transloadit' +import { Transloadit } from 'transloadit' const { TRANSLOADIT_KEY, TRANSLOADIT_SECRET } = process.env if (TRANSLOADIT_KEY == null || TRANSLOADIT_SECRET == null) { @@ -31,7 +32,7 @@ const credentialParams: CreateTemplateCredentialParams = { }, } -console.log(`==> listTemplateCredentials`) +console.log('==> listTemplateCredentials') const { credentials } = await transloadit.listTemplateCredentials({ sort: 'created', 
order: 'asc', @@ -50,7 +51,7 @@ for (const credential of credentials) { } } -console.log(`==> createTemplateCredential`) +console.log('==> createTemplateCredential') const createTemplateCredentialResult = await transloadit.createTemplateCredential(credentialParams) console.log('TemplateCredential created successfully:', createTemplateCredentialResult) // ^-- with Templates, there is `ok`, `message`, `id`, `content`, `name`, `require_signature_auth`. Same is true for: created, updated, fetched diff --git a/examples/template_api.ts b/examples/template_api.ts index 9d792bfe..7ef841d8 100644 --- a/examples/template_api.ts +++ b/examples/template_api.ts @@ -6,7 +6,8 @@ // // yarn prepack // -import { type TemplateContent, Transloadit } from 'transloadit' +import type { TemplateContent } from 'transloadit' +import { Transloadit } from 'transloadit' const { TRANSLOADIT_KEY, TRANSLOADIT_SECRET } = process.env if (TRANSLOADIT_KEY == null || TRANSLOADIT_SECRET == null) { diff --git a/src/ApiError.ts b/src/ApiError.ts index d1ec2362..9719a678 100644 --- a/src/ApiError.ts +++ b/src/ApiError.ts @@ -1,4 +1,5 @@ -import { HTTPError, type RequestError } from 'got' +import type { RequestError } from 'got' +import { HTTPError } from 'got' export interface TransloaditErrorResponseBody { error?: string diff --git a/src/Transloadit.ts b/src/Transloadit.ts index f8a29793..55f0fca5 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -5,26 +5,20 @@ import { access } from 'node:fs/promises' import type { Readable } from 'node:stream' import debug from 'debug' import FormData from 'form-data' -import got, { - type Delays, - type Headers, - HTTPError, - type OptionsOfJSONResponseBody, - RequestError, - type RetryOptions, -} from 'got' +import type { Delays, Headers, OptionsOfJSONResponseBody, RetryOptions } from 'got' +import got, { HTTPError, RequestError } from 'got' import intoStream, { type Input as IntoStreamInput } from 'into-stream' import { isReadableStream, isStream } from 
'is-stream' import pMap from 'p-map' import packageJson from '../package.json' with { type: 'json' } -import { ApiError, type TransloaditErrorResponseBody } from './ApiError.ts' -import { - type AssemblyIndex, - type AssemblyIndexItem, - type AssemblyStatus, - assemblyIndexSchema, - assemblyStatusSchema, +import type { TransloaditErrorResponseBody } from './ApiError.ts' +import { ApiError } from './ApiError.ts' +import type { + AssemblyIndex, + AssemblyIndexItem, + AssemblyStatus, } from './alphalib/types/assemblyStatus.ts' +import { assemblyIndexSchema, assemblyStatusSchema } from './alphalib/types/assemblyStatus.ts' import { zodParseWithContext } from './alphalib/zodParseWithContext.ts' import type { BaseResponse, @@ -50,7 +44,8 @@ import type { import InconsistentResponseError from './InconsistentResponseError.ts' import PaginationStream from './PaginationStream.ts' import PollingTimeoutError from './PollingTimeoutError.ts' -import { type Stream, sendTusRequest } from './tus.ts' +import type { Stream } from './tus.ts' +import { sendTusRequest } from './tus.ts' // See https://github.com/sindresorhus/got/tree/v11.8.6?tab=readme-ov-file#errors // Expose relevant errors @@ -294,11 +289,14 @@ export class Transloadit { } // If any stream emits error, we want to handle this and exit with error + // Note: We add a no-op catch to prevent unhandled rejection when createAssemblyAndUpload + // completes first and this promise is orphaned (but streams may still error later) const streamErrorPromise = new Promise((_resolve, reject) => { for (const { stream } of allStreams) { stream.on('error', reject) } }) + streamErrorPromise.catch(() => {}) const createAssemblyAndUpload = async () => { const result: AssemblyStatus = await this._remoteJson({ @@ -460,7 +458,7 @@ export class Transloadit { assemblyId: string, params: ReplayAssemblyNotificationParams = {}, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: 
`/assembly_notifications/${assemblyId}/replay`, method: 'post', ...(Object.keys(params).length > 0 && { params }), @@ -545,7 +543,7 @@ export class Transloadit { async createTemplateCredential( params: CreateTemplateCredentialParams, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/template_credentials', method: 'post', params: params || {}, @@ -563,7 +561,7 @@ export class Transloadit { credentialId: string, params: CreateTemplateCredentialParams, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/template_credentials/${credentialId}`, method: 'put', params: params || {}, @@ -577,7 +575,7 @@ export class Transloadit { * @returns when the Credential is deleted */ async deleteTemplateCredential(credentialId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/template_credentials/${credentialId}`, method: 'delete', }) @@ -590,7 +588,7 @@ export class Transloadit { * @returns when the Credential is retrieved */ async getTemplateCredential(credentialId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/template_credentials/${credentialId}`, method: 'get', }) @@ -605,7 +603,7 @@ export class Transloadit { async listTemplateCredentials( params?: ListTemplateCredentialsParams, ): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/template_credentials', method: 'get', params: params || {}, @@ -625,7 +623,7 @@ export class Transloadit { * @returns when the template is created */ async createTemplate(params: CreateTemplateParams): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/templates', method: 'post', params: params || {}, @@ -640,7 +638,7 @@ export class Transloadit { * @returns when the template is edited */ async editTemplate(templateId: string, params: EditTemplateParams): Promise { - return this._remoteJson({ + return await 
this._remoteJson({ urlSuffix: `/templates/${templateId}`, method: 'put', params: params || {}, @@ -654,7 +652,7 @@ export class Transloadit { * @returns when the template is deleted */ async deleteTemplate(templateId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/templates/${templateId}`, method: 'delete', }) @@ -667,7 +665,7 @@ export class Transloadit { * @returns when the template is retrieved */ async getTemplate(templateId: string): Promise { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/templates/${templateId}`, method: 'get', }) @@ -682,7 +680,7 @@ export class Transloadit { async listTemplates( params?: ListTemplatesParams, ): Promise> { - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: '/templates', method: 'get', params: params || {}, @@ -702,7 +700,7 @@ export class Transloadit { */ async getBill(month: string): Promise { assert.ok(month, 'month is required') - return this._remoteJson({ + return await this._remoteJson({ urlSuffix: `/bill/${month}`, method: 'get', }) @@ -799,14 +797,14 @@ export class Transloadit { if (params == null) { params = {} } - if (params['auth'] == null) { - params['auth'] = {} + if (params.auth == null) { + params.auth = {} } - if (params['auth'].key == null) { - params['auth'].key = this._authKey + if (params.auth.key == null) { + params.auth.key = this._authKey } - if (params['auth'].expires == null) { - params['auth'].expires = this._getExpiresDate() + if (params.auth.expires == null) { + params.auth.expires = this._getExpiresDate() } return JSON.stringify(params) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index ce00fc70..793bd17b 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -136,6 +136,9 @@ function dirProvider(output: string): OutstreamProvider { mtime = new Date(0) } const outstream = fs.createWriteStream(outpath) as OutStream + // Attach a no-op error handler 
to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + outstream.on('error', () => {}) outstream.mtime = mtime return outstream } @@ -155,6 +158,9 @@ function fileProvider(output: string): OutstreamProvider { mtime = new Date(0) } const outstream = fs.createWriteStream(output) as OutStream + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + outstream.on('error', () => {}) outstream.mtime = mtime return outstream } @@ -258,7 +264,11 @@ class ReaddirJobEmitter extends MyEventEmitter { if (existing) existing.end() const outstream = await outstreamProvider(file, topdir) streamRegistry[file] = outstream ?? undefined - this.emit('job', { in: fs.createReadStream(file), out: outstream }) + const instream = fs.createReadStream(file) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + this.emit('job', { in: instream, out: outstream }) } } } @@ -282,6 +292,9 @@ class SingleJobEmitter extends MyEventEmitter { } } else { instream = fs.createReadStream(normalizedFile) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) } process.nextTick(() => { @@ -366,6 +379,9 @@ class WatchJobEmitter extends MyEventEmitter { streamRegistry[normalizedFile] = outstream ?? 
undefined const instream = fs.createReadStream(normalizedFile) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) this.emit('job', { in: instream, out: outstream }) } } @@ -639,7 +655,10 @@ export default async function run( }) const jobsPromise = new JobsPromise() + const activeJobs: Set = new Set() + emitter.on('job', (job: Job) => { + activeJobs.add(job) const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined const outPath = job.out?.path outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) @@ -720,6 +739,7 @@ export default async function run( jobsPromise.add(jobPromise) async function completeJob(): Promise { + activeJobs.delete(job) const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined const outPath = job.out?.path outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 
'null'}`) @@ -735,6 +755,7 @@ export default async function run( }) emitter.on('error', (err: Error) => { + activeJobs.clear() outputctl.error(err) reject(err) }) diff --git a/src/cli/commands/auth.ts b/src/cli/commands/auth.ts index f7ddd8e5..9cc4a449 100644 --- a/src/cli/commands/auth.ts +++ b/src/cli/commands/auth.ts @@ -1,6 +1,7 @@ import process from 'node:process' import { Command, Option } from 'clipanion' -import { type ZodIssue, z } from 'zod' +import type { ZodIssue } from 'zod' +import { z } from 'zod' import { assemblyAuthInstructionsSchema, assemblyInstructionsSchema, diff --git a/src/tus.ts b/src/tus.ts index 80be39d1..d57ebf4f 100644 --- a/src/tus.ts +++ b/src/tus.ts @@ -3,7 +3,8 @@ import { basename } from 'node:path' import type { Readable } from 'node:stream' import debug from 'debug' import pMap from 'p-map' -import { type OnSuccessPayload, Upload, type UploadOptions } from 'tus-js-client' +import type { OnSuccessPayload, UploadOptions } from 'tus-js-client' +import { Upload } from 'tus-js-client' import type { AssemblyStatus } from './alphalib/types/assemblyStatus.ts' import type { UploadProgress } from './Transloadit.ts' diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index 16c8d4c4..a9f549a9 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -329,8 +329,10 @@ describe('assemblies', () => { .to.have.nested.property('msg.message') .that.equals("Output collision between 'in/1.jpg' and '1.jpg'") } - // Allow time for any pending file streams to settle/error before test cleanup - await new Promise((resolve) => setTimeout(resolve, 100)) + // Allow time for any pending file streams and API calls to settle before test cleanup. + // The assembliesCreate function may have started createAssembly calls that are still + // in flight when the conflict error is thrown. These need time to be handled/cancelled. 
+ await new Promise((resolve) => setTimeout(resolve, 500)) }), ) diff --git a/test/e2e/live-api.test.ts b/test/e2e/live-api.test.ts index 2becc79d..b1a13f5e 100644 --- a/test/e2e/live-api.test.ts +++ b/test/e2e/live-api.test.ts @@ -12,13 +12,14 @@ import intoStream from 'into-stream' import * as temp from 'temp' import type { InterpolatableRobotFileFilterInstructionsInput } from '../../src/alphalib/types/robots/file-filter.ts' import type { InterpolatableRobotImageResizeInstructionsInput } from '../../src/alphalib/types/robots/image-resize.ts' -import { - type CreateAssemblyOptions, - type CreateAssemblyParams, - Transloadit, - type UploadProgress, +import type { + CreateAssemblyOptions, + CreateAssemblyParams, + UploadProgress, } from '../../src/Transloadit.ts' -import { createTestServer, type TestServer } from '../testserver.ts' +import { Transloadit } from '../../src/Transloadit.ts' +import type { TestServer } from '../testserver.ts' +import { createTestServer } from '../testserver.ts' import { createProxy } from '../util.ts' // Load environment variables from .env file @@ -154,7 +155,7 @@ interface VirtualTestServer { url: string } -async function createVirtualTestServer(handler: RequestListener): Promise { +function createVirtualTestServer(handler: RequestListener): VirtualTestServer { const id = randomUUID() log('Adding virtual server handler', id) const url = `${testServer.url}/${id}` @@ -644,7 +645,7 @@ describe('API integration', { timeout: 60000, retry: 1 }, () => { it('should send a notification upon assembly completion', async () => { await new Promise((resolve, reject) => { - const onNotification: OnNotification = async ({ path }) => { + const onNotification: OnNotification = ({ path }) => { try { expect(path).toBe('/') resolve() diff --git a/test/testserver.ts b/test/testserver.ts index efe23023..5368c5ad 100644 --- a/test/testserver.ts +++ b/test/testserver.ts @@ -1,9 +1,10 @@ -import { createServer, type RequestListener, type Server } from 
'node:http' +import type { RequestListener, Server } from 'node:http' +import { createServer } from 'node:http' import { setTimeout } from 'node:timers/promises' import debug from 'debug' import got from 'got' - -import { type CreateTunnelResult, createTunnel } from './tunnel.ts' +import type { CreateTunnelResult } from './tunnel.ts' +import { createTunnel } from './tunnel.ts' const log = debug('transloadit:testserver') @@ -12,7 +13,7 @@ interface HttpServer { port: number } -async function createHttpServer(handler: RequestListener): Promise { +function createHttpServer(handler: RequestListener): Promise { return new Promise((resolve, reject) => { const server = createServer(handler) diff --git a/test/tunnel.ts b/test/tunnel.ts index b52f1e0a..1cf0b592 100644 --- a/test/tunnel.ts +++ b/test/tunnel.ts @@ -2,7 +2,8 @@ import { Resolver } from 'node:dns/promises' import { createInterface } from 'node:readline' import * as timers from 'node:timers/promises' import debug from 'debug' -import { ExecaError, execa, type ResultPromise } from 'execa' +import type { ResultPromise } from 'execa' +import { ExecaError, execa } from 'execa' import pRetry from 'p-retry' const log = debug('transloadit:cloudflared-tunnel') diff --git a/test/unit/mock-http.test.ts b/test/unit/mock-http.test.ts index d11a0c24..09776526 100644 --- a/test/unit/mock-http.test.ts +++ b/test/unit/mock-http.test.ts @@ -1,12 +1,10 @@ import { inspect } from 'node:util' import nock from 'nock' - +import type { AssemblyStatus, Options } from '../../src/Transloadit.ts' import { ApiError, - type AssemblyStatus, assemblyInstructionsSchema, InconsistentResponseError, - type Options, TimeoutError, Transloadit, } from '../../src/Transloadit.ts' @@ -157,30 +155,30 @@ describe('Mocked API tests', () => { // console.log(inspect(errorString)) expect(inspect(errorString).split('\n')).toEqual([ expect.stringMatching( - `API error \\(HTTP 400\\) INVALID_FILE_META_DATA: Invalid file metadata 
https://api2-oltu.transloadit.com/assemblies/foo`, + 'API error \\(HTTP 400\\) INVALID_FILE_META_DATA: Invalid file metadata https://api2-oltu.transloadit.com/assemblies/foo', ), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), expect.stringMatching( - ` at createAssemblyAndUpload \\(.+\\/src\\/Transloadit\\.ts:\\d+:\\d+\\)`, + ' at createAssemblyAndUpload \\(.+\\/src\\/Transloadit\\.ts:\\d+:\\d+\\)', ), - expect.stringMatching(` at .+\\/test\\/unit\\/mock-http\\.test\\.ts:\\d+:\\d+`), - expect.stringMatching(` at .+`), + expect.stringMatching(' at .+\\/test\\/unit\\/mock-http\\.test\\.ts:\\d+:\\d+'), + expect.stringMatching(' at .+'), expect.stringMatching(` code: 'INVALID_FILE_META_DATA',`), expect.stringMatching(` rawMessage: 'Invalid file metadata',`), - expect.stringMatching(` reason: undefined,`), + expect.stringMatching(' reason: undefined,'), expect.stringMatching( ` assemblySslUrl: 'https:\\/\\/api2-oltu\\.transloadit\\.com\\/assemblies\\/foo'`, ), expect.stringMatching(` assemblyId: '123',`), - expect.stringMatching(` cause: HTTPError: Response code 400 \\(Bad Request\\)`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` at .+`), - expect.stringMatching(` input: undefined,`), + expect.stringMatching(' cause: HTTPError: Response code 400 \\(Bad Request\\)'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' at .+'), + expect.stringMatching(' input: undefined,'), expect.stringMatching(` code: 'ERR_NON_2XX_3XX_RESPONSE',`), expect.stringMatching(' \\[cause\\]: {}'), expect.stringMatching(' }'), diff --git a/test/unit/test-transloadit-client.test.ts 
b/test/unit/test-transloadit-client.test.ts index d5f57159..0fc8f228 100644 --- a/test/unit/test-transloadit-client.test.ts +++ b/test/unit/test-transloadit-client.test.ts @@ -1,4 +1,5 @@ -import { PassThrough, type Readable } from 'node:stream' +import type { Readable } from 'node:stream' +import { PassThrough } from 'node:stream' import FormData from 'form-data' import got, { type CancelableRequest } from 'got' diff --git a/test/unit/transloadit-advanced.test.ts b/test/unit/transloadit-advanced.test.ts index 64b4c1c9..712db296 100644 --- a/test/unit/transloadit-advanced.test.ts +++ b/test/unit/transloadit-advanced.test.ts @@ -151,22 +151,22 @@ describe('Transloadit advanced behaviors', () => { it('streams assemblies page by page until all items are read', async () => { type ListAssembliesReturn = Awaited> - const listAssemblies = vi.spyOn(client, 'listAssemblies').mockImplementation(async (params) => { + const listAssemblies = vi.spyOn(client, 'listAssemblies').mockImplementation((params) => { const page = params?.page ?? 1 if (page === 1) { - return { + return Promise.resolve({ items: [{ id: 1 }, { id: 2 }], count: 3, - } as unknown as ListAssembliesReturn + } as unknown as ListAssembliesReturn) } if (page === 2) { - return { + return Promise.resolve({ items: [{ id: 3 }], count: 3, - } as unknown as ListAssembliesReturn + } as unknown as ListAssembliesReturn) } - return { items: [], count: 3 } as unknown as ListAssembliesReturn + return Promise.resolve({ items: [], count: 3 } as unknown as ListAssembliesReturn) }) const stream = client.streamAssemblies({ page: 1 } as never) diff --git a/test/unit/tus.test.ts b/test/unit/tus.test.ts index a96fc547..c98623fc 100644 --- a/test/unit/tus.test.ts +++ b/test/unit/tus.test.ts @@ -84,9 +84,9 @@ describe('sendTusRequest', () => { [secondPath]: 2048, } - statMock.mockImplementation(async (path: StatPathArg) => { + statMock.mockImplementation((path: StatPathArg) => { const key = typeof path === 'string' ? 
path : path.toString() - return createStatResult(sizesByPath[key] ?? 0) + return Promise.resolve(createStatResult(sizesByPath[key] ?? 0)) }) const onProgress = vi.fn() @@ -155,9 +155,9 @@ describe('sendTusRequest', () => { [secondPath]: 100, } - statMock.mockImplementation(async (path: StatPathArg) => { + statMock.mockImplementation((path: StatPathArg) => { const key = typeof path === 'string' ? path : path.toString() - return createStatResult(sizesByPath[key] ?? 0) + return Promise.resolve(createStatResult(sizesByPath[key] ?? 0)) }) const onProgress = vi.fn() diff --git a/test/util.ts b/test/util.ts index 1a4c4197..ed71d0f8 100644 --- a/test/util.ts +++ b/test/util.ts @@ -1,4 +1,5 @@ -import { RequestError, type Transloadit } from '../src/Transloadit.ts' +import type { Transloadit } from '../src/Transloadit.ts' +import { RequestError } from '../src/Transloadit.ts' export const createProxy = (transloaditInstance: Transloadit) => { return new Proxy(transloaditInstance, { From ebbbcb70fc82e8dbb07d3f1d5780bee418571f4e Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 13:18:00 +0100 Subject: [PATCH 05/45] abort assemblies --- CHANGELOG.md | 6 +++++ src/Transloadit.ts | 11 ++++++++++ src/cli/assemblies-create.ts | 9 +++++++- src/tus.ts | 39 +++++++++++++++++++++++++++++---- test/e2e/cli/assemblies.test.ts | 6 ++--- test/tunnel.ts | 1 + 6 files changed, 63 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2183316a..83546b2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ You may also want to refer to [GitHub releases](https://github.com/transloadit/node-sdk/releases). 
+## Unreleased + +- Add `signal` option to `createAssembly()` for cancelling in-flight HTTP requests and TUS uploads via `AbortController` +- Integrate transloadify CLI into the SDK, providing `assemblies`, `templates`, and `bills` commands +- Apply stricter biome lint rules (noExplicitAny, useAwait, noForEach, noNonNullAssertion) + ## v4.0.7 Released: 2025-11-26. diff --git a/src/Transloadit.ts b/src/Transloadit.ts index 55f0fca5..af88d10f 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -90,6 +90,11 @@ export interface CreateAssemblyOptions { onUploadProgress?: (uploadProgress: UploadProgress) => void onAssemblyProgress?: AssemblyProgress assemblyId?: string + /** + * Optional AbortSignal to cancel the assembly creation and upload. + * When aborted, any in-flight HTTP requests and TUS uploads will be cancelled. + */ + signal?: AbortSignal } export interface AwaitAssemblyCompletionOptions { @@ -231,6 +236,7 @@ export class Transloadit { files = {}, uploads = {}, assemblyId, + signal, } = opts // Keep track of how long the request took @@ -307,6 +313,7 @@ export class Transloadit { fields: { tus_num_expected_upload_files: allStreams.length, }, + signal, }) checkResult(result) @@ -317,6 +324,7 @@ export class Transloadit { onProgress: onUploadProgress, requestedChunkSize, uploadConcurrency, + signal, }) } @@ -828,6 +836,7 @@ export class Transloadit { params?: TParams fields?: Fields headers?: Headers + signal?: AbortSignal }): Promise { const { urlSuffix, @@ -837,6 +846,7 @@ export class Transloadit { params = {}, fields, headers, + signal, } = opts // Allow providing either a `urlSuffix` or a full `url` @@ -869,6 +879,7 @@ export class Transloadit { ...headers, }, responseType: 'json', + signal, } try { diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 793bd17b..7505dc71 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -656,6 +656,8 @@ export default async function run( const jobsPromise = new 
JobsPromise() const activeJobs: Set = new Set() + // AbortController to cancel all in-flight createAssembly calls when an error occurs + const abortController = new AbortController() emitter.on('job', (job: Job) => { activeJobs.add(job) @@ -669,7 +671,10 @@ export default async function run( superceded = true }) - const createOptions: CreateAssemblyOptions = { params } + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } if (job.in != null) { createOptions.uploads = { in: job.in } } @@ -755,6 +760,8 @@ export default async function run( }) emitter.on('error', (err: Error) => { + // Abort all in-flight createAssembly calls to ensure clean shutdown + abortController.abort() activeJobs.clear() outputctl.error(err) reject(err) diff --git a/src/tus.ts b/src/tus.ts index d57ebf4f..5f4a1b3c 100644 --- a/src/tus.ts +++ b/src/tus.ts @@ -21,6 +21,7 @@ interface SendTusRequestOptions { requestedChunkSize: number uploadConcurrency: number onProgress: (options: UploadProgress) => void + signal?: AbortSignal } export async function sendTusRequest({ @@ -29,6 +30,7 @@ export async function sendTusRequest({ requestedChunkSize, uploadConcurrency, onProgress, + signal, }: SendTusRequestOptions) { const streamLabels = Object.keys(streamsMap) @@ -43,6 +45,9 @@ export async function sendTusRequest({ await pMap( streamLabels, async (label) => { + // Check if aborted before each operation + if (signal?.aborted) throw new Error('Upload aborted') + const streamInfo = streamsMap[label] if (!streamInfo) { throw new Error(`Stream info not found for label: ${label}`) @@ -55,7 +60,7 @@ export async function sendTusRequest({ totalBytes += size } }, - { concurrency: 5 }, + { concurrency: 5, signal }, ) const uploadProgresses: Record = {} @@ -103,12 +108,29 @@ export async function sendTusRequest({ const filename = path ? 
basename(path) : label - await new Promise((resolve, reject) => { + await new Promise((resolvePromise, rejectPromise) => { if (!assembly.assembly_ssl_url) { - reject(new Error('assembly_ssl_url is not present in the assembly status')) + rejectPromise(new Error('assembly_ssl_url is not present in the assembly status')) return } + // Check if already aborted before starting + if (signal?.aborted) { + rejectPromise(new Error('Upload aborted')) + return + } + + // Wrap resolve/reject to clean up abort listener + let abortHandler: (() => void) | undefined + const resolve = (payload: OnSuccessPayload) => { + if (abortHandler) signal?.removeEventListener('abort', abortHandler) + resolvePromise(payload) + } + const reject = (err: unknown) => { + if (abortHandler) signal?.removeEventListener('abort', abortHandler) + rejectPromise(err) + } + const tusOptions: UploadOptions = { endpoint: assembly.tus_url, metadata: { @@ -127,11 +149,20 @@ export async function sendTusRequest({ const tusUpload = new Upload(stream, tusOptions) + // Handle abort signal + if (signal) { + abortHandler = () => { + tusUpload.abort() + reject(new Error('Upload aborted')) + } + signal.addEventListener('abort', abortHandler, { once: true }) + } + tusUpload.start() }) log(label, 'upload done') } - await pMap(streamLabels, uploadSingleStream, { concurrency: uploadConcurrency }) + await pMap(streamLabels, uploadSingleStream, { concurrency: uploadConcurrency, signal }) } diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index a9f549a9..feaedc33 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -329,10 +329,8 @@ describe('assemblies', () => { .to.have.nested.property('msg.message') .that.equals("Output collision between 'in/1.jpg' and '1.jpg'") } - // Allow time for any pending file streams and API calls to settle before test cleanup. 
- // The assembliesCreate function may have started createAssembly calls that are still - // in flight when the conflict error is thrown. These need time to be handled/cancelled. - await new Promise((resolve) => setTimeout(resolve, 500)) + // Small delay to allow abort signals to propagate and streams to close + await new Promise((resolve) => setTimeout(resolve, 50)) }), ) diff --git a/test/tunnel.ts b/test/tunnel.ts index 1cf0b592..41c6327a 100644 --- a/test/tunnel.ts +++ b/test/tunnel.ts @@ -55,6 +55,7 @@ async function startTunnel({ cloudFlaredPath, port }: CreateTunnelParams) { 'failed to sufficiently increase receive buffer size', 'update check failed error', 'failed to parse quick Tunnel ID', + 'failed to unmarshal quick Tunnel', // Transient Cloudflare API JSON parsing error ] rl.on('line', (line) => { From 714ea73be26ff10989775f694b6563a66a77cfd2 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 13:56:26 +0100 Subject: [PATCH 06/45] Improve types, make replay clearer, roll test for it --- src/apiTypes.ts | 2 +- src/cli/assemblies.ts | 12 +++++---- src/cli/commands/assemblies.ts | 16 ++++++++--- src/cli/templates.ts | 30 ++++++++++----------- test/e2e/cli/assemblies.test.ts | 48 ++++++++++++++++++++++++++++++++- test/e2e/cli/templates.test.ts | 9 ++++--- 6 files changed, 87 insertions(+), 30 deletions(-) diff --git a/src/apiTypes.ts b/src/apiTypes.ts index a5f28b1c..28d474c8 100644 --- a/src/apiTypes.ts +++ b/src/apiTypes.ts @@ -44,7 +44,7 @@ export type ListAssembliesParams = OptionalAuthParams & { export type ReplayAssemblyParams = Pick< CreateAssemblyParams, - 'auth' | 'template_id' | 'notify_url' | 'fields' + 'auth' | 'template_id' | 'notify_url' | 'fields' | 'steps' > & { reparse_template?: number } diff --git a/src/cli/assemblies.ts b/src/cli/assemblies.ts index 2330662b..2361a2a3 100644 --- a/src/cli/assemblies.ts +++ b/src/cli/assemblies.ts @@ -1,5 +1,7 @@ import { z } from 'zod' import { tryCatch } from 
'../alphalib/tryCatch.ts' +import { stepsSchema, type Steps } from '../alphalib/types/template.ts' +import type { ReplayAssemblyParams } from '../apiTypes.ts' import type { Transloadit } from '../Transloadit.ts' import assembliesCreate from './assemblies-create.ts' import { createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' @@ -103,8 +105,6 @@ async function _delete( export { _delete as delete } -const StepsSchema = z.record(z.string(), z.unknown()) - export async function replay( output: IOutputCtl, client: Transloadit, @@ -114,9 +114,9 @@ export async function replay( try { const buf = await streamToBuffer(createReadStream(steps)) const parsed: unknown = JSON.parse(buf.toString()) - const validated = StepsSchema.safeParse(parsed) + const validated = stepsSchema.safeParse(parsed) if (!validated.success) { - throw new Error('Invalid steps format') + throw new Error(`Invalid steps format: ${validated.error.message}`) } await apiCall(validated.data) } catch (err) { @@ -127,13 +127,15 @@ export async function replay( await apiCall() } - async function apiCall(_steps?: Record): Promise { + async function apiCall(stepsOverride?: Steps): Promise { const promises = assemblies.map(async (assembly) => { const [err] = await tryCatch( client.replayAssembly(assembly, { reparse_template: reparse ? 1 : 0, fields, notify_url, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + steps: stepsOverride as ReplayAssemblyParams['steps'], }), ) if (err) { diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts index 7989b18a..cb569426 100644 --- a/src/cli/commands/assemblies.ts +++ b/src/cli/commands/assemblies.ts @@ -227,11 +227,19 @@ export class AssembliesReplayCommand extends AuthenticatedCommand { static override usage = Command.Usage({ category: 'Assemblies', description: 'Replay assemblies', + details: ` + Replay one or more assemblies. By default, replays use the original assembly instructions. 
+ Use --steps to override the instructions, or --reparse-template to use the latest template version. + `, examples: [ - ['Replay an assembly', 'transloadit assemblies replay ASSEMBLY_ID'], + ['Replay an assembly with original steps', 'transloadit assemblies replay ASSEMBLY_ID'], + [ + 'Replay with different steps', + 'transloadit assemblies replay --steps new-steps.json ASSEMBLY_ID', + ], [ - 'Replay with new notify URL', - 'transloadit assemblies replay --notify-url https://example.com/notify ASSEMBLY_ID', + 'Replay with updated template', + 'transloadit assemblies replay --reparse-template ASSEMBLY_ID', ], ], }) @@ -241,7 +249,7 @@ export class AssembliesReplayCommand extends AuthenticatedCommand { }) steps = Option.String('--steps,-s', { - description: 'Override assembly instructions', + description: 'Optional JSON file to override assembly instructions', }) notifyUrl = Option.String('--notify-url', { diff --git a/src/cli/templates.ts b/src/cli/templates.ts index 78de1048..8e15c3fa 100644 --- a/src/cli/templates.ts +++ b/src/cli/templates.ts @@ -4,6 +4,7 @@ import { promisify } from 'node:util' import rreaddir from 'recursive-readdir' import { z } from 'zod' import { tryCatch } from '../alphalib/tryCatch.ts' +import { stepsSchema, type Steps } from '../alphalib/types/template.ts' import type { TemplateContent } from '../apiTypes.ts' import type { Transloadit } from '../Transloadit.ts' import { createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' @@ -46,8 +47,6 @@ export interface TemplateSyncOptions { recursive?: boolean } -const StepsSchema = z.record(z.string(), z.unknown()) - export async function create( output: IOutputCtl, client: Transloadit, @@ -57,13 +56,14 @@ export async function create( const buf = await streamToBuffer(createReadStream(file)) const parsed: unknown = JSON.parse(buf.toString()) - const validated = StepsSchema.safeParse(parsed) + const validated = stepsSchema.safeParse(parsed) if (!validated.success) { - throw new 
Error('Invalid template steps format') + throw new Error(`Invalid template steps format: ${validated.error.message}`) } const result = await client.createTemplate({ name, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS template: { steps: validated.data } as TemplateContent, }) output.print(result.id, result) @@ -101,36 +101,34 @@ export async function modify( try { const buf = await streamToBuffer(createReadStream(file)) - let json: Record | null = null + let steps: Steps | null = null let newName = name if (buf.length > 0) { const parsed: unknown = JSON.parse(buf.toString()) - const validated = StepsSchema.safeParse(parsed) + const validated = stepsSchema.safeParse(parsed) if (!validated.success) { - throw new Error('Invalid template steps format') + throw new Error(`Invalid template steps format: ${validated.error.message}`) } - json = validated.data + steps = validated.data } if (!name || buf.length === 0) { const tpl = await client.getTemplate(template) if (!name) newName = tpl.name - if (buf.length === 0) { - const stepsContent = tpl.content.steps - if (stepsContent && typeof stepsContent === 'object') { - json = stepsContent as Record - } + if (buf.length === 0 && tpl.content.steps) { + steps = tpl.content.steps } } - if (json === null) { + if (steps === null) { throw new Error('No steps to update template with') } await client.editTemplate(template, { name: newName, - template: { steps: json } as TemplateContent, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + template: { steps } as TemplateContent, }) } catch (err) { output.error(formatAPIError(err)) @@ -318,7 +316,7 @@ export async function sync( const result = await client.getTemplate(templateId) - template.data.steps = result.content as Record + template.data.steps = result.content.steps const file = path.join(path.dirname(template.file), `${result.name}.json`) await fsp.writeFile(template.file, JSON.stringify(template.data)) diff --git 
a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index feaedc33..e12d9445 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -106,7 +106,7 @@ describe('assemblies', () => { describe('replay', () => { it( - 'should replay assemblies', + 'should replay assemblies without steps (uses original)', testCase(async (client) => { const assembly = await client.createAssembly({ params: { @@ -124,6 +124,52 @@ describe('assemblies', () => { expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) }), ) + + it( + 'should replay assemblies with steps override', + testCase(async (client) => { + // Create an assembly with 100x100 resize + const assembly = await client.createAssembly({ + params: { + steps: { + import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 50, + height: 50, + }, + }, + }, + }) + + // Create steps file with different dimensions (80x80) + const overrideSteps = { + import: { robot: '/http/import', url: 'https://placehold.co/100.jpg' }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 80, + height: 80, + }, + } + await fsp.writeFile('override-steps.json', JSON.stringify(overrideSteps)) + + const output = new OutputCtl() + const assemblyId = assembly.assembly_id as string + await assemblies.replay(output, client, { + assemblies: [assemblyId], + steps: 'override-steps.json', + }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + + // Note: We can't easily verify the output dimensions here without downloading, + // but the test verifies the steps file is parsed and sent without errors + }), + ) }) describe('create', () => { diff --git a/test/e2e/cli/templates.test.ts b/test/e2e/cli/templates.test.ts index 82b97a60..a02ad873 100644 --- a/test/e2e/cli/templates.test.ts +++ b/test/e2e/cli/templates.test.ts @@ -16,7 
+16,8 @@ describe('templates', () => { testCase(async (client) => { const executions = [1, 2, 3, 4, 5].map(async (n) => { const output = new OutputCtl() - await fsp.writeFile(`${n}.json`, JSON.stringify({ testno: n })) + const steps = { import: { robot: '/http/import', url: `https://example.com/${n}` } } + await fsp.writeFile(`${n}.json`, JSON.stringify(steps)) await templates.create(output, client, { name: `test-${n}`, file: `${n}.json` }) return output.get() as OutputEntry[] }) @@ -103,7 +104,8 @@ describe('templates', () => { it( 'should modify but not rename the template', testCase(async (client) => { - await fsp.writeFile('template.json', JSON.stringify({ stage: 1 })) + const steps = { import: { robot: '/http/import', url: 'https://example.com/modified' } } + await fsp.writeFile('template.json', JSON.stringify(steps)) const output = new OutputCtl() await templates.modify(output, client, { @@ -144,7 +146,8 @@ describe('templates', () => { it( 'should modify and rename the template', testCase(async (client) => { - await fsp.writeFile('template.json', JSON.stringify({ stage: 2 })) + const steps = { import: { robot: '/http/import', url: 'https://example.com/renamed' } } + await fsp.writeFile('template.json', JSON.stringify(steps)) const output = new OutputCtl() await templates.modify(output, client, { From 164354eedfee85bfdd211f3debb7972571398348 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 14:03:27 +0100 Subject: [PATCH 07/45] stronger types --- src/cli/assemblies-create.ts | 12 +++++++++--- src/cli/commands/auth.ts | 15 ++++++--------- src/cli/types.ts | 16 +++++++++++----- 3 files changed, 26 insertions(+), 17 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 7505dc71..4c0c5362 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -8,6 +8,7 @@ import process from 'node:process' import type { Readable, Writable } from 'node:stream' import tty from 'node:tty' import { 
promisify } from 'node:util' +import { stepsSchema, type Steps } from '../alphalib/types/template.ts' import type { CreateAssemblyParams } from '../apiTypes.ts' import type { AssemblyStatus, CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' import JobsPromise from './JobsPromise.ts' @@ -599,11 +600,16 @@ export default async function run( let resolvedOutput = output if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' - // Read steps file async before entering the Promise constructor - let stepsData: CreateAssemblyParams['steps'] | undefined + // Read and validate steps file async before entering the Promise constructor + let stepsData: Steps | undefined if (steps) { const stepsContent = await fsp.readFile(steps, 'utf8') - stepsData = JSON.parse(stepsContent) as CreateAssemblyParams['steps'] + const parsed: unknown = JSON.parse(stepsContent) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid steps format: ${validated.error.message}`) + } + stepsData = validated.data } // Determine output stat async before entering the Promise constructor diff --git a/src/cli/commands/auth.ts b/src/cli/commands/auth.ts index 9cc4a449..03b1818e 100644 --- a/src/cli/commands/auth.ts +++ b/src/cli/commands/auth.ts @@ -29,6 +29,8 @@ const cliSignatureParamsSchema = assemblyInstructionsSchema .partial() .passthrough() +type CliSignatureParams = z.infer + function formatIssues(issues: ZodIssue[]): string { return issues .map((issue) => { @@ -99,7 +101,7 @@ function generateSignature( algorithm?: string, ): SigResult { const { authKey, authSecret } = credentials - let params: Record + let params: CliSignatureParams if (input === '') { params = { auth: { key: authKey } } @@ -120,13 +122,8 @@ function generateSignature( return { ok: false, error: `Invalid params: ${formatIssues(parsedResult.error.issues)}` } } - const parsedParams = parsedResult.data as Record - const existingAuth = - typeof 
parsedParams.auth === 'object' && - parsedParams.auth != null && - !Array.isArray(parsedParams.auth) - ? (parsedParams.auth as Record) - : {} + const parsedParams = parsedResult.data + const existingAuth = parsedParams.auth ?? {} params = { ...parsedParams, @@ -178,7 +175,7 @@ function generateSmartCdnUrl( } const { workspace, template, input: inputFieldRaw, url_params, expire_at_ms } = parsedResult.data - const urlParams = normalizeUrlParams(url_params as Record | undefined) + const urlParams = normalizeUrlParams(url_params) let expiresAt: number | undefined if (typeof expire_at_ms === 'string') { diff --git a/src/cli/types.ts b/src/cli/types.ts index 98deeae8..b4ce710f 100644 --- a/src/cli/types.ts +++ b/src/cli/types.ts @@ -1,4 +1,5 @@ import { z } from 'zod' +import { optionalStepsSchema, type Steps } from '../alphalib/types/template.ts' import type { BillResponse, ListedTemplate, TemplateResponse } from '../apiTypes.ts' import type { AssemblyStatus, Transloadit } from '../Transloadit.ts' import type { IOutputCtl } from './OutputCtl.ts' @@ -33,14 +34,19 @@ export const TransloaditAPIErrorSchema = z.object({ }) export type TransloaditAPIError = z.infer -// Template file data -export const TemplateFileDataSchema = z +// Template file data - explicit type to avoid TS inference limits +export interface TemplateFileData { + transloadit_template_id?: string + steps?: Steps + [key: string]: unknown // passthrough +} + +export const TemplateFileDataSchema: z.ZodType = z .object({ transloadit_template_id: z.string().optional(), - steps: z.record(z.string(), z.unknown()).optional(), + steps: optionalStepsSchema, }) - .passthrough() -export type TemplateFileData = z.infer + .passthrough() as z.ZodType export interface TemplateFile { file: string From 2fdc52548271ab4536643e12190849c70181c5f1 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 20:12:01 +0100 Subject: [PATCH 08/45] wip --- src/cli/assemblies-create.ts | 29 +++++++++++++++++++++-------- 1 
file changed, 21 insertions(+), 8 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 4c0c5362..665a53f3 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -8,7 +8,7 @@ import process from 'node:process' import type { Readable, Writable } from 'node:stream' import tty from 'node:tty' import { promisify } from 'node:util' -import { stepsSchema, type Steps } from '../alphalib/types/template.ts' +import type { StepsInput } from '../alphalib/types/template.ts' import type { CreateAssemblyParams } from '../apiTypes.ts' import type { AssemblyStatus, CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' import JobsPromise from './JobsPromise.ts' @@ -600,16 +600,27 @@ export default async function run( let resolvedOutput = output if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' - // Read and validate steps file async before entering the Promise constructor - let stepsData: Steps | undefined + // Read steps file async before entering the Promise constructor + // We use StepsInput (the input type) rather than Steps (the transformed output type) + // to avoid zod adding default values that the API may reject + let stepsData: StepsInput | undefined if (steps) { const stepsContent = await fsp.readFile(steps, 'utf8') const parsed: unknown = JSON.parse(stepsContent) - const validated = stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid steps format: ${validated.error.message}`) + // Basic structural validation: must be an object with step names as keys + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + throw new Error('Invalid steps format: expected an object with step names as keys') } - stepsData = validated.data + // Validate each step has a robot field + for (const [stepName, step] of Object.entries(parsed)) { + if (step == null || typeof step !== 'object' || Array.isArray(step)) { + throw new 
Error(`Invalid steps format: step '${stepName}' must be an object`) + } + if (!('robot' in step) || typeof (step as Record).robot !== 'string') { + throw new Error(`Invalid steps format: step '${stepName}' must have a 'robot' string property`) + } + } + stepsData = parsed as StepsInput } // Determine output stat async before entering the Promise constructor @@ -638,7 +649,9 @@ export default async function run( return new Promise((resolve, reject) => { const params: CreateAssemblyParams = ( - stepsData ? { steps: stepsData } : { template_id: template } + stepsData + ? { steps: stepsData as CreateAssemblyParams['steps'] } + : { template_id: template } ) as CreateAssemblyParams if (fields) { params.fields = fields From 90233437b4517589d08f13cea106e6f7d2adedbf Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Tue, 2 Dec 2025 20:35:18 +0100 Subject: [PATCH 09/45] fix version --- src/cli/commands/index.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/cli/commands/index.ts b/src/cli/commands/index.ts index 0e6e7fbd..83e5a099 100644 --- a/src/cli/commands/index.ts +++ b/src/cli/commands/index.ts @@ -1,5 +1,7 @@ import { Builtins, Cli } from 'clipanion' +import packageJson from '../../../package.json' with { type: 'json' } + import { AssembliesCreateCommand, AssembliesDeleteCommand, @@ -27,7 +29,7 @@ export function createCli(): Cli { const cli = new Cli({ binaryLabel: 'Transloadit CLI', binaryName: 'transloadit', - binaryVersion: '1.0.0', + binaryVersion: packageJson.version, }) // Built-in commands From dbcbaf421c812079b95072eb1d3ec9085e92cc67 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 10:51:20 +0100 Subject: [PATCH 10/45] format --- src/cli/assemblies-create.ts | 8 ++++---- src/cli/assemblies.ts | 3 ++- src/cli/templates.ts | 3 ++- src/cli/types.ts | 3 ++- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 665a53f3..dca9c5d9 
100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -617,7 +617,9 @@ export default async function run( throw new Error(`Invalid steps format: step '${stepName}' must be an object`) } if (!('robot' in step) || typeof (step as Record).robot !== 'string') { - throw new Error(`Invalid steps format: step '${stepName}' must have a 'robot' string property`) + throw new Error( + `Invalid steps format: step '${stepName}' must have a 'robot' string property`, + ) } } stepsData = parsed as StepsInput @@ -649,9 +651,7 @@ export default async function run( return new Promise((resolve, reject) => { const params: CreateAssemblyParams = ( - stepsData - ? { steps: stepsData as CreateAssemblyParams['steps'] } - : { template_id: template } + stepsData ? { steps: stepsData as CreateAssemblyParams['steps'] } : { template_id: template } ) as CreateAssemblyParams if (fields) { params.fields = fields diff --git a/src/cli/assemblies.ts b/src/cli/assemblies.ts index 2361a2a3..cf541c34 100644 --- a/src/cli/assemblies.ts +++ b/src/cli/assemblies.ts @@ -1,6 +1,7 @@ import { z } from 'zod' import { tryCatch } from '../alphalib/tryCatch.ts' -import { stepsSchema, type Steps } from '../alphalib/types/template.ts' +import type { Steps } from '../alphalib/types/template.ts' +import { stepsSchema } from '../alphalib/types/template.ts' import type { ReplayAssemblyParams } from '../apiTypes.ts' import type { Transloadit } from '../Transloadit.ts' import assembliesCreate from './assemblies-create.ts' diff --git a/src/cli/templates.ts b/src/cli/templates.ts index 8e15c3fa..da9b81bc 100644 --- a/src/cli/templates.ts +++ b/src/cli/templates.ts @@ -4,7 +4,8 @@ import { promisify } from 'node:util' import rreaddir from 'recursive-readdir' import { z } from 'zod' import { tryCatch } from '../alphalib/tryCatch.ts' -import { stepsSchema, type Steps } from '../alphalib/types/template.ts' +import type { Steps } from '../alphalib/types/template.ts' +import { stepsSchema } from 
'../alphalib/types/template.ts' import type { TemplateContent } from '../apiTypes.ts' import type { Transloadit } from '../Transloadit.ts' import { createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' diff --git a/src/cli/types.ts b/src/cli/types.ts index b4ce710f..99c3821a 100644 --- a/src/cli/types.ts +++ b/src/cli/types.ts @@ -1,5 +1,6 @@ import { z } from 'zod' -import { optionalStepsSchema, type Steps } from '../alphalib/types/template.ts' +import type { Steps } from '../alphalib/types/template.ts' +import { optionalStepsSchema } from '../alphalib/types/template.ts' import type { BillResponse, ListedTemplate, TemplateResponse } from '../apiTypes.ts' import type { AssemblyStatus, Transloadit } from '../Transloadit.ts' import type { IOutputCtl } from './OutputCtl.ts' From 1fc64ca322930c13126593912b51e4bc38d5f26c Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:03:57 +0100 Subject: [PATCH 11/45] run e2e for own PRs --- .github/workflows/e2e.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 7e2d220f..bb2fd880 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -5,11 +5,17 @@ on: push: branches: - main + pull_request: schedule: - cron: '0 8 * * *' jobs: test: + # Run on push/schedule/dispatch, or on PRs only if from same repo (not forks) + # This protects secrets from being exposed to fork PRs + if: > + github.event_name != 'pull_request' || + github.event.pull_request.head.repo.full_name == github.repository runs-on: ubuntu-latest strategy: # e2e tests are not yet ready to run in parallel From 228afed97fc305876b47a6b3121256214ccbab22 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:07:23 +0100 Subject: [PATCH 12/45] dotenv --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 28434e1c..f3a5075a 100644 --- a/package.json +++ 
b/package.json @@ -31,7 +31,8 @@ "recursive-readdir": "^2.2.3", "tus-js-client": "^4.3.1", "type-fest": "^4.41.0", - "zod": "3.25.76" + "zod": "3.25.76", + "dotenv": "^17.2.3" }, "devDependencies": { "@biomejs/biome": "^2.2.4", @@ -40,7 +41,6 @@ "@types/temp": "^0.9.4", "@vitest/coverage-v8": "^3.2.4", "badge-maker": "^5.0.2", - "dotenv": "^17.2.3", "execa": "9.6.0", "image-size": "^2.0.2", "nock": "^14.0.10", From ba3823f976b5106920df8aa700a8ce6af1c93fb8 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:20:37 +0100 Subject: [PATCH 13/45] cli docs --- README.md | 151 ++++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 131 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 416ad85a..4f086383 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,136 @@ or npm install --save transloadit ``` -## Usage +## Command Line Interface (CLI) + +This package includes a full-featured CLI for interacting with Transloadit from your terminal. 
+ +### Quick Start + +```bash +# Set your credentials +export TRANSLOADIT_KEY="YOUR_TRANSLOADIT_KEY" +export TRANSLOADIT_SECRET="YOUR_TRANSLOADIT_SECRET" + +# See all available commands +npx transloadit --help +``` + +### Processing Media + +Create Assemblies to process files using Assembly Instructions (steps) or Templates: + +```bash +# Process a file using a steps file +npx transloadit assemblies create --steps steps.json --input image.jpg --output result.jpg + +# Process using a Template +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input image.jpg --output result.jpg + +# Process with custom fields +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --field size=100 --input image.jpg --output thumb.jpg + +# Process a directory of files +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ + +# Process recursively with file watching +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ --recursive --watch +``` + +### Managing Assemblies + +```bash +# List recent assemblies +npx transloadit assemblies list + +# List assemblies with filters +npx transloadit assemblies list --after 2024-01-01 --before 2024-12-31 + +# Get assembly status +npx transloadit assemblies get ASSEMBLY_ID + +# Cancel an assembly +npx transloadit assemblies delete ASSEMBLY_ID + +# Replay an assembly (re-run with original instructions) +npx transloadit assemblies replay ASSEMBLY_ID + +# Replay with different steps +npx transloadit assemblies replay --steps new-steps.json ASSEMBLY_ID + +# Replay using latest template version +npx transloadit assemblies replay --reparse-template ASSEMBLY_ID +``` + +### Managing Templates + +```bash +# List all templates +npx transloadit templates list + +# Get template content +npx transloadit templates get TEMPLATE_ID + +# Create a template from a JSON file +npx transloadit templates create my-template template.json + +# Modify a template +npx 
transloadit templates modify TEMPLATE_ID template.json + +# Rename a template +npx transloadit templates modify TEMPLATE_ID --name new-name + +# Delete a template +npx transloadit templates delete TEMPLATE_ID + +# Sync local template files with Transloadit (bidirectional) +npx transloadit templates sync templates/*.json +npx transloadit templates sync --recursive templates/ +``` + +### Billing + +```bash +# Get bill for a month +npx transloadit bills get 2024-01 + +# Get detailed bill as JSON +npx transloadit bills get 2024-01 --json +``` + +### Assembly Notifications + +```bash +# List notifications for an assembly +npx transloadit assembly-notifications list ASSEMBLY_ID + +# Replay a notification +npx transloadit assembly-notifications replay ASSEMBLY_ID +``` + +### Signature Generation + +```bash +# Generate a signature for assembly params +echo '{"steps":{}}' | npx transloadit auth signature + +# Generate with specific algorithm +echo '{"steps":{}}' | npx transloadit auth signature --algorithm sha256 + +# Generate a signed Smart CDN URL +echo '{"workspace":"my-workspace","template":"my-template","input":"image.jpg"}' | npx transloadit auth smart-cdn +``` + +### CLI Options + +All commands support these common options: + +- `--json, -j` - Output results as JSON +- `--verbose, -v` - Verbose output +- `--quiet, -q` - Suppress non-essential output +- `--help, -h` - Show help for a command + +## SDK Usage The following code will upload an image and resize it to a thumbnail: @@ -385,25 +514,7 @@ Calculates a signature for the given `params` JSON object. If the `params` objec This function returns an object with the key `signature` (containing the calculated signature string) and a key `params`, which contains the stringified version of the passed `params` object (including the set expires and authKey keys). -#### CLI smart_sig - -Generate a signed Smart CDN URL from the command line. 
The CLI reads a JSON object from stdin, injects credentials from `TRANSLOADIT_KEY`/`TRANSLOADIT_SECRET`, and prints the URL returned by `getSignedSmartCDNUrl()`. - -```sh -TRANSLOADIT_KEY=... TRANSLOADIT_SECRET=... \ - printf '{"workspace":"demo","template":"resize","input":"image.jpg","url_params":{"width":320}}' | npx transloadit smart_sig -``` - -You can also use `TRANSLOADIT_AUTH_KEY`/`TRANSLOADIT_AUTH_SECRET` as aliases for the environment variables. - -#### CLI sig - -Sign assembly params from the command line. The CLI reads a JSON object from stdin (or falls back to an empty object), injects credentials from `TRANSLOADIT_KEY`/`TRANSLOADIT_SECRET`, and prints the payload returned by `calcSignature()`. Use `--algorithm` to pick a specific hashing algorithm; it defaults to `sha384`. - -```sh -TRANSLOADIT_KEY=... TRANSLOADIT_SECRET=... \ - printf '{"auth":{"expires":"2025-01-02T00:00:00Z"}}' | npx transloadit sig --algorithm sha256 -``` +See [Signature Generation](#signature-generation) in the CLI section for command-line usage. 
#### getSignedSmartCDNUrl(params) From d28058ff21fe74e153cf001c6ab4af8a1eb53dee Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:22:45 +0100 Subject: [PATCH 14/45] add missing docs --- README.md | 33 ++++++++++++++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 4f086383..bdc1df73 100644 --- a/README.md +++ b/README.md @@ -226,10 +226,12 @@ You can find [details about your executed Assemblies here](https://transloadit.c - [Upload and resize image](https://github.com/transloadit/node-sdk/blob/main/examples/resize_an_image.ts) - [Upload image and convert to WebP](https://github.com/transloadit/node-sdk/blob/main/examples/convert_to_webp.ts) +- [Rasterize SVG to PNG](https://github.com/transloadit/node-sdk/blob/main/examples/rasterize_svg_to_png.ts) - [Crop a face out of an image and download the result](https://github.com/transloadit/node-sdk/blob/main/examples/face_detect_download.ts) - [Retry example](https://github.com/transloadit/node-sdk/blob/main/examples/retry.ts) - [Calculate total costs (GB usage)](https://github.com/transloadit/node-sdk/blob/main/examples/fetch_costs_of_all_assemblies_in_timeframe.ts) - [Templates CRUD](https://github.com/transloadit/node-sdk/blob/main/examples/template_api.ts) +- [Template Credentials CRUD](https://github.com/transloadit/node-sdk/blob/main/examples/credentials.ts) For more fully working examples take a look at [`examples/`](https://github.com/transloadit/node-sdk/blob/main/examples/). @@ -245,6 +247,7 @@ Table of contents: - [Assemblies](#assemblies) - [Assembly notifications](#assembly-notifications) - [Templates](#templates) +- [Template Credentials](#template-credentials) - [Errors](#errors) - [Rate limiting & auto retry](#rate-limiting--auto-retry) @@ -498,6 +501,34 @@ The method returns an object containing these properties: Creates an `objectMode` `Readable` stream that automates handling of `listTemplates` pagination. 
Similar to `streamAssemblies`. +### Template Credentials + +Template Credentials allow you to store third-party credentials (e.g., AWS S3, Google Cloud Storage, FTP) securely on Transloadit for use in your Assembly Instructions. + +#### async createTemplateCredential(params) + +Creates a new Template Credential. The `params` object should contain the credential configuration. See [API documentation](https://transloadit.com/docs/api/template-credentials-post/). + +#### async editTemplateCredential(credentialId, params) + +Updates an existing Template Credential identified by `credentialId`. See [API documentation](https://transloadit.com/docs/api/template-credentials-credential-id-put/). + +#### async deleteTemplateCredential(credentialId) + +Deletes the Template Credential identified by `credentialId`. See [API documentation](https://transloadit.com/docs/api/template-credentials-credential-id-delete/). + +#### async getTemplateCredential(credentialId) + +Retrieves the Template Credential identified by `credentialId`. See [API documentation](https://transloadit.com/docs/api/template-credentials-credential-id-get/). + +#### async listTemplateCredentials(params) + +Lists all Template Credentials. See [API documentation](https://transloadit.com/docs/api/template-credentials-get/). + +#### streamTemplateCredentials(params) + +Creates an `objectMode` `Readable` stream that automates handling of `listTemplateCredentials` pagination. Similar to `streamAssemblies`. + ### Other #### setDefaultTimeout(timeout) @@ -606,7 +637,7 @@ If you want to retry on other errors, please see the [retry example code](exampl This project uses [debug](https://github.com/visionmedia/debug) so you can run node with the `DEBUG=transloadit` evironment variable to enable verbose logging. 
Example: ```bash -DEBUG=transloadit* node examples/template_api.js +DEBUG=transloadit* npx tsx examples/template_api.ts ``` ## Maintainers From 170aba2836f1a553f11e702ecd0cb5f624b7a8ad Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:26:59 +0100 Subject: [PATCH 15/45] merge CI --- .github/workflows/ci.yml | 100 ++++++++++++++++++++++++++++++++++++-- .github/workflows/e2e.yml | 94 ----------------------------------- 2 files changed, 96 insertions(+), 98 deletions(-) delete mode 100644 .github/workflows/e2e.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 68b1afd1..8795fd06 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,15 +1,19 @@ name: CI on: + workflow_dispatch: pull_request: push: branches: - main tags: - '*' + schedule: + - cron: '0 8 * * *' jobs: pack: + name: Build package runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -24,6 +28,7 @@ jobs: path: '*.tgz' biome: + name: Lint (Biome) runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -34,6 +39,7 @@ jobs: - run: corepack yarn lint:js typescript: + name: Lint (TypeScript) runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -43,7 +49,8 @@ jobs: - run: corepack yarn - run: corepack yarn lint:ts - vitest: + unit: + name: Unit tests (Node ${{ matrix.node }}) runs-on: ubuntu-latest strategy: matrix: @@ -58,14 +65,86 @@ jobs: - run: corepack yarn - run: corepack yarn test:unit - name: Upload coverage reports artifact - if: matrix.node == 22 # Only upload coverage from the latest Node.js version + if: matrix.node == 22 uses: actions/upload-artifact@v4 with: name: coverage-reports path: coverage/ + e2e: + name: E2E tests (Node ${{ matrix.node }}) + # Run on push/schedule/dispatch, or on PRs only if from same repo (not forks) + # This protects secrets from being exposed to fork PRs + if: > + github.event_name != 'pull_request' || + github.event.pull_request.head.repo.full_name == github.repository + runs-on: 
ubuntu-latest + strategy: + # e2e tests are not yet ready to run in parallel + max-parallel: 1 + matrix: + node: + - 20 + - 22 + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node }} + - run: corepack yarn + - name: Download cloudflared + run: | + curl -fsSLo cloudflared-linux-amd64 https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 + chmod +x cloudflared-linux-amd64 + + # can be used for debugging: + # - name: Setup tmate session + # uses: mxschmitt/action-tmate@v3 + + - run: corepack yarn test + env: + TRANSLOADIT_KEY: ${{ secrets.TRANSLOADIT_KEY }} + TRANSLOADIT_SECRET: ${{ secrets.TRANSLOADIT_SECRET }} + NODE_OPTIONS: --trace-deprecation --trace-warnings + CLOUDFLARED_PATH: ./cloudflared-linux-amd64 + DEBUG: 'transloadit:*' + + - if: matrix.node == 22 + name: Generate the badge from the json-summary + run: node --experimental-strip-types test/generate-coverage-badge.ts coverage/coverage-summary.json + - if: matrix.node == 22 + name: Move HTML report and badge to the correct location + run: | + mv coverage/lcov-report static-build + mv coverage-badge.svg static-build/ + # *** BEGIN PUBLISH STATIC SITE STEPS *** + # Use the standard checkout action to check out the destination repo to a separate directory + # See https://github.com/mifi/github-action-push-static + - if: matrix.node == 22 + uses: actions/checkout@v4 + with: + ssh-key: ${{ secrets.COVERAGE_REPO_SSH_PRIVATE_KEY }} + repository: transloadit/node-sdk-coverage + path: static-files-destination + + # Push coverage data + - if: matrix.node == 22 + run: | + git config --global user.name github-actions + git config --global user.email github-actions@github.com + # Remove existing files: + rm -rf static-files-destination/* + # Replace with new files: + cp -a static-build/* static-files-destination/ + cd static-files-destination + git add . 
+ # git diff-index: to avoid the git commit failing if there are no changes to commit + git diff-index --quiet HEAD || git commit --message 'Static file updates' + git push + coverage: - needs: vitest + name: Upload coverage + needs: unit runs-on: ubuntu-latest if: github.ref == 'refs/heads/main' steps: @@ -83,13 +162,26 @@ name: node-sdk fail_ci_if_error: true + slack-on-failure: + name: Slack notification + needs: [e2e] + if: ${{ failure() && github.ref == 'refs/heads/main' }} + runs-on: ubuntu-latest + steps: + - uses: 8398a7/action-slack@v3 + with: + status: failure + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + release: + name: Publish to npm runs-on: ubuntu-latest needs: - pack - biome - typescript - - vitest + - unit if: startsWith(github.ref, 'refs/tags/') permissions: id-token: write diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml deleted file mode 100644 index bb2fd880..00000000 --- a/.github/workflows/e2e.yml +++ /dev/null @@ -1,94 +0,0 @@ -name: e2e tests - -on: - workflow_dispatch: - push: - branches: - - main - pull_request: - schedule: - - cron: '0 8 * * *' - -jobs: - test: - # Run on push/schedule/dispatch, or on PRs only if from same repo (not forks) - # This protects secrets from being exposed to fork PRs - if: > - github.event_name != 'pull_request' || - github.event.pull_request.head.repo.full_name == github.repository - runs-on: ubuntu-latest - strategy: - # e2e tests are not yet ready to run in parallel - max-parallel: 1 - matrix: - node: - - 20.19.0 - - 22.14.0 - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node }} - - run: corepack yarn - - name: Download cloudflared - run: | - curl -fsSLo cloudflared-linux-amd64 https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 - chmod +x cloudflared-linux-amd64 - - # can be used for debugging: - # - name: Setup tmate session - # uses: 
mxschmitt/action-tmate@v3 - - - run: corepack yarn test - env: - TRANSLOADIT_KEY: ${{ secrets.TRANSLOADIT_KEY }} - TRANSLOADIT_SECRET: ${{ secrets.TRANSLOADIT_SECRET }} - NODE_OPTIONS: --trace-deprecation --trace-warnings - CLOUDFLARED_PATH: ./cloudflared-linux-amd64 - DEBUG: 'transloadit:*' - - - if: matrix.node == '22.14.0' - name: Generate the badge from the json-summary - run: node --experimental-strip-types test/generate-coverage-badge.ts coverage/coverage-summary.json - - if: matrix.node == '22.14.0' - name: Move HTML report and badge to the correct location - run: | - mv coverage/lcov-report static-build - mv coverage-badge.svg static-build/ - # *** BEGIN PUBLISH STATIC SITE STEPS *** - # Use the standard checkout action to check out the destination repo to a separate directory - # See https://github.com/mifi/github-action-push-static - - if: matrix.node == '22.14.0' - uses: actions/checkout@v4 - with: - ssh-key: ${{ secrets.COVERAGE_REPO_SSH_PRIVATE_KEY }} - repository: transloadit/node-sdk-coverage - path: static-files-destination - - # Push coverage data - - if: matrix.node == '22.14.0' - run: | - git config --global user.name github-actions - git config --global user.email github-actions@github.com - # Remove existing files: - rm -rf static-files-destination/* - # Replace with new files: - cp -a static-build/* static-files-destination/ - cd static-files-destination - git add . 
- # git diff-index: to avoid doing the git commit failing if there are no changes to be commit - git diff-index --quiet HEAD || git commit --message 'Static file updates' - git push - - slack-on-failure: - needs: [test] - if: ${{ failure() && github.ref == 'refs/heads/main' }} - - runs-on: ubuntu-latest - - steps: - - uses: 8398a7/action-slack@v3 - with: - status: failure - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} From 4c14d93dcd8f50b2db3b5b2db0fbe4a1cd88a95a Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:28:19 +0100 Subject: [PATCH 16/45] add node 24 --- .github/workflows/ci.yml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8795fd06..72194412 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -57,6 +57,7 @@ jobs: node: - 20 - 22 + - 24 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 @@ -65,7 +66,7 @@ jobs: - run: corepack yarn - run: corepack yarn test:unit - name: Upload coverage reports artifact - if: matrix.node == 22 + if: matrix.node == 24 uses: actions/upload-artifact@v4 with: name: coverage-reports @@ -86,6 +87,7 @@ jobs: node: - 20 - 22 + - 24 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 @@ -109,10 +111,10 @@ jobs: CLOUDFLARED_PATH: ./cloudflared-linux-amd64 DEBUG: 'transloadit:*' - - if: matrix.node == 22 + - if: matrix.node == 24 name: Generate the badge from the json-summary run: node --experimental-strip-types test/generate-coverage-badge.ts coverage/coverage-summary.json - - if: matrix.node == 22 + - if: matrix.node == 24 name: Move HTML report and badge to the correct location run: | mv coverage/lcov-report static-build @@ -120,7 +122,7 @@ jobs: # *** BEGIN PUBLISH STATIC SITE STEPS *** # Use the standard checkout action to check out the destination repo to a separate directory # See https://github.com/mifi/github-action-push-static - - if: matrix.node == 22 
+ - if: matrix.node == 24 uses: actions/checkout@v4 with: ssh-key: ${{ secrets.COVERAGE_REPO_SSH_PRIVATE_KEY }} @@ -128,7 +130,7 @@ jobs: path: static-files-destination # Push coverage data - - if: matrix.node == 22 + - if: matrix.node == 24 run: | git config --global user.name github-actions git config --global user.email github-actions@github.com From 946a8f6679e0d22c31557078a467a5c0b23f7d1d Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 11:29:33 +0100 Subject: [PATCH 17/45] e2e only on 24 --- .github/workflows/ci.yml | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 72194412..85790477 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,26 +73,18 @@ jobs: path: coverage/ e2e: - name: E2E tests (Node ${{ matrix.node }}) + name: E2E tests # Run on push/schedule/dispatch, or on PRs only if from same repo (not forks) # This protects secrets from being exposed to fork PRs if: > github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository runs-on: ubuntu-latest - strategy: - # e2e tests are not yet ready to run in parallel - max-parallel: 1 - matrix: - node: - - 20 - - 22 - - 24 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: ${{ matrix.node }} + node-version: 24 - run: corepack yarn - name: Download cloudflared run: | @@ -111,27 +103,23 @@ jobs: CLOUDFLARED_PATH: ./cloudflared-linux-amd64 DEBUG: 'transloadit:*' - - if: matrix.node == 24 - name: Generate the badge from the json-summary + - name: Generate the badge from the json-summary run: node --experimental-strip-types test/generate-coverage-badge.ts coverage/coverage-summary.json - - if: matrix.node == 24 - name: Move HTML report and badge to the correct location + - name: Move HTML report and badge to the correct location run: | mv coverage/lcov-report static-build mv 
coverage-badge.svg static-build/ # *** BEGIN PUBLISH STATIC SITE STEPS *** # Use the standard checkout action to check out the destination repo to a separate directory # See https://github.com/mifi/github-action-push-static - - if: matrix.node == 24 - uses: actions/checkout@v4 + - uses: actions/checkout@v4 with: ssh-key: ${{ secrets.COVERAGE_REPO_SSH_PRIVATE_KEY }} repository: transloadit/node-sdk-coverage path: static-files-destination # Push coverage data - - if: matrix.node == 24 - run: | + - run: | git config --global user.name github-actions git config --global user.email github-actions@github.com # Remove existing files: From 90cde44a82d49c3b1774f085ba289235945754ba Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 14:50:50 +0100 Subject: [PATCH 18/45] test: use unique template names to avoid CI conflicts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use a unique testId (timestamp + random string) for all template names in e2e tests to prevent conflicts when tests run in parallel or when previous runs didn't clean up properly. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- test/e2e/cli/templates.test.ts | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/test/e2e/cli/templates.test.ts b/test/e2e/cli/templates.test.ts index a02ad873..7becd94d 100644 --- a/test/e2e/cli/templates.test.ts +++ b/test/e2e/cli/templates.test.ts @@ -10,6 +10,9 @@ import type { OutputEntry } from './test-utils.ts' import { authKey, authSecret, delay, testCase } from './test-utils.ts' describe('templates', () => { + // Use unique prefix for all template names to avoid conflicts between test runs + const testId = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}` + describe('create', () => { it( 'should create templates', @@ -18,7 +21,7 @@ describe('templates', () => { const output = new OutputCtl() const steps = { import: { robot: '/http/import', url: `https://example.com/${n}` } } await fsp.writeFile(`${n}.json`, JSON.stringify(steps)) - await templates.create(output, client, { name: `test-${n}`, file: `${n}.json` }) + await templates.create(output, client, { name: `test-${testId}-${n}`, file: `${n}.json` }) return output.get() as OutputEntry[] }) @@ -88,7 +91,7 @@ describe('templates', () => { describe('modify', () => { let templateId: string - const originalName = `original-name-${Date.now()}` + const originalName = `original-name-${testId}` beforeAll(async () => { const client = new TransloaditClient({ authKey, authSecret }) @@ -127,10 +130,11 @@ describe('templates', () => { testCase(async (client) => { await fsp.writeFile('template.json', '') + const newName = `new-name-${testId}` const output = new OutputCtl() await templates.modify(output, client, { template: templateId, - name: 'new-name', + name: newName, file: 'template.json', }) const result = output.get() @@ -138,7 +142,7 @@ describe('templates', () => { expect(result).to.have.lengthOf(0) await delay(2000) const template = await client.getTemplate(templateId) - 
expect(template).to.have.property('name').that.equals('new-name') + expect(template).to.have.property('name').that.equals(newName) expect(template).to.have.property('content').that.has.property('steps') }), ) @@ -149,10 +153,11 @@ describe('templates', () => { const steps = { import: { robot: '/http/import', url: 'https://example.com/renamed' } } await fsp.writeFile('template.json', JSON.stringify(steps)) + const newerName = `newer-name-${testId}` const output = new OutputCtl() await templates.modify(output, client, { template: templateId, - name: 'newer-name', + name: newerName, file: 'template.json', }) const result = output.get() @@ -160,7 +165,7 @@ describe('templates', () => { expect(result).to.have.lengthOf(0) await delay(2000) const template = await client.getTemplate(templateId) - expect(template).to.have.property('name').that.equals('newer-name') + expect(template).to.have.property('name').that.equals(newerName) expect(template).to.have.property('content').that.has.property('steps') }), ) @@ -178,7 +183,7 @@ describe('templates', () => { const ids = await Promise.all( [1, 2, 3, 4, 5].map(async (n) => { const response = await client.createTemplate({ - name: `delete-test-${n}`, + name: `delete-test-${testId}-${n}`, template: { steps: { dummy: { robot: '/html/convert', url: `https://example.com/${n}` } }, } as TemplateContent, @@ -254,7 +259,7 @@ describe('templates', () => { 'should update local files when outdated', testCase(async (client) => { const params = { - name: `test-local-update-${Date.now()}`, + name: `test-local-update-${testId}`, template: { steps: { dummy: { robot: '/html/convert', url: 'https://example.com/changed' } }, } as TemplateContent, @@ -292,7 +297,7 @@ describe('templates', () => { 'should update remote template when outdated', testCase(async (client) => { const params = { - name: `test-remote-update-${Date.now()}`, + name: `test-remote-update-${testId}`, template: { steps: { dummy: { robot: '/html/convert', url: 
'https://example.com/unchanged' } }, } as TemplateContent, From 21b85f7a296b7a0eccf42e091d4750ba9e2d5dc9 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 15:30:56 +0100 Subject: [PATCH 19/45] feat: honor abort signal during awaitAssemblyCompletion polling MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously, the abort signal passed to createAssembly was only honored during the initial HTTP POST and TUS uploads. Now it's also honored during the polling loop in awaitAssemblyCompletion, allowing users to cancel long-running assembly operations at any point. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/Transloadit.ts | 35 ++++++++++++++++++++++++++++++++--- test/unit/mock-http.test.ts | 22 ++++++++++++++++++++++ 2 files changed, 54 insertions(+), 3 deletions(-) diff --git a/src/Transloadit.ts b/src/Transloadit.ts index af88d10f..ed0f2ecc 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -102,6 +102,11 @@ export interface AwaitAssemblyCompletionOptions { timeout?: number interval?: number startTimeMs?: number + /** + * Optional AbortSignal to cancel polling. + * When aborted, the polling loop will stop and throw an AbortError. + */ + signal?: AbortSignal } export interface SmartCDNUrlOptions { @@ -339,6 +344,7 @@ export class Transloadit { timeout, onAssemblyProgress, startTimeMs, + signal, }) checkResult(awaitResult) return awaitResult @@ -358,12 +364,18 @@ export class Transloadit { timeout, startTimeMs = getHrTimeMs(), interval = 1000, + signal, }: AwaitAssemblyCompletionOptions = {}, ): Promise { assert.ok(assemblyId) while (true) { - const result = await this.getAssembly(assemblyId) + // Check if aborted before making the request + if (signal?.aborted) { + throw signal.reason ?? 
new DOMException('Aborted', 'AbortError') + } + + const result = await this.getAssembly(assemblyId, { signal }) // If 'ok' is not in result, it implies a terminal state (e.g., error, completed, canceled). // If 'ok' is present, then we check if it's one of the non-terminal polling states. @@ -391,7 +403,19 @@ export class Transloadit { if (timeout != null && nowMs - startTimeMs >= timeout) { throw new PollingTimeoutError('Polling timed out') } - await new Promise((resolve) => setTimeout(resolve, interval)) + + // Make the sleep abortable + await new Promise((resolve, reject) => { + const timeoutId = setTimeout(resolve, interval) + signal?.addEventListener( + 'abort', + () => { + clearTimeout(timeoutId) + reject(signal.reason ?? new DOMException('Aborted', 'AbortError')) + }, + { once: true }, + ) + }) } } @@ -523,11 +547,16 @@ export class Transloadit { * Get an Assembly * * @param assemblyId the Assembly Id + * @param options optional request options * @returns the retrieved Assembly */ - async getAssembly(assemblyId: string): Promise { + async getAssembly( + assemblyId: string, + options?: { signal?: AbortSignal }, + ): Promise { const rawResult = await this._remoteJson, OptionalAuthParams>({ urlSuffix: `/assemblies/${assemblyId}`, + signal: options?.signal, }) const parsedResult = zodParseWithContext(assemblyStatusSchema, rawResult) diff --git a/test/unit/mock-http.test.ts b/test/unit/mock-http.test.ts index 09776526..65b85b63 100644 --- a/test/unit/mock-http.test.ts +++ b/test/unit/mock-http.test.ts @@ -54,6 +54,28 @@ describe('Mocked API tests', () => { scope.done() }) + it('should honor abort signal during awaitAssemblyCompletion polling', async () => { + const client = getLocalClient() + + // Set up a mock that keeps returning ASSEMBLY_EXECUTING (never completes) + const scope = nock('http://localhost') + .get('/assemblies/1') + .query(() => true) + .reply(200, { ok: 'ASSEMBLY_EXECUTING', assembly_url: '', assembly_ssl_url: '' }) + .persist() // Keep 
responding with same status + + const controller = new AbortController() + + // Abort after 50ms + setTimeout(() => controller.abort(), 50) + + await expect( + client.awaitAssemblyCompletion('1', { interval: 10, signal: controller.signal }), + ).rejects.toThrow(expect.objectContaining({ name: 'AbortError' })) + + scope.persist(false) + }) + it('should handle aborted correctly', async () => { const client = getLocalClient() From 35e50cda8a0e425a4d1c972bbc44783f0f0e6559 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 15:50:53 +0100 Subject: [PATCH 20/45] add browser --- .gemini/settings.json | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .gemini/settings.json diff --git a/.gemini/settings.json b/.gemini/settings.json new file mode 100644 index 00000000..2704fdc3 --- /dev/null +++ b/.gemini/settings.json @@ -0,0 +1,11 @@ +{ + "mcpServers": { + "playwright": { + "command": "npx", + "args": [ + "-y", + "@playwright/mcp@latest" + ] + } + } +} \ No newline at end of file From da6f0b42dd21841ba42976caa8bff816e5b18a0f Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 15:51:02 +0100 Subject: [PATCH 21/45] fix bug --- src/cli/assemblies-create.ts | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index dca9c5d9..3676f193 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -705,7 +705,9 @@ export default async function run( const assemblyId = result.assembly_id if (!assemblyId) throw new Error('No assembly_id in result') - let assembly: AssemblyStatus = await client.getAssembly(assemblyId) + let assembly: AssemblyStatus = await client.getAssembly(assemblyId, { + signal: abortController.signal, + }) while ( assembly.ok !== 'ASSEMBLY_COMPLETED' && @@ -713,9 +715,23 @@ export default async function run( !assembly.error ) { if (superceded) return + if (abortController.signal.aborted) { + 
throw abortController.signal.reason || new Error('Aborted') + } + outputctl.debug(`Assembly status: ${assembly.ok}`) - await new Promise((resolve) => setTimeout(resolve, 1000)) - assembly = await client.getAssembly(assemblyId) + await new Promise((resolve, reject) => { + const timer = setTimeout(resolve, 1000) + abortController.signal.addEventListener( + 'abort', + () => { + clearTimeout(timer) + reject(abortController.signal.reason || new Error('Aborted')) + }, + { once: true }, + ) + }) + assembly = await client.getAssembly(assemblyId, { signal: abortController.signal }) } if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { From ce030e9c88ee176b84cb1b6c08095d7b9c6a7555 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 15:58:25 +0100 Subject: [PATCH 22/45] fix: address code review feedback MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - JobsPromise now throws if error handler not set before adding promises - Added clarifying comment explaining orphaned promise pattern in createAssembly - WatchJobEmitter now properly cleans up file watchers on SIGINT/SIGTERM and errors - Expanded --verbose/--quiet documentation with output level table 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- README.md | 14 ++++++++-- src/Transloadit.ts | 9 ++++-- src/cli/JobsPromise.ts | 7 ++++- src/cli/assemblies-create.ts | 27 +++++++++++++++--- test/unit/cli/JobsPromise.test.ts | 46 +++++++++++++++++++++++++++++++ 5 files changed, 92 insertions(+), 11 deletions(-) create mode 100644 test/unit/cli/JobsPromise.test.ts diff --git a/README.md b/README.md index bdc1df73..24dc1e1f 100644 --- a/README.md +++ b/README.md @@ -166,11 +166,19 @@ echo '{"workspace":"my-workspace","template":"my-template","input":"image.jpg"}' All commands support these common options: -- `--json, -j` - Output results as JSON -- `--verbose, -v` - Verbose output -- `--quiet, -q` - 
Suppress non-essential output +- `--json, -j` - Output results as JSON (useful for scripting) +- `--verbose, -v` - Enable debug output (shows DEBUG messages in addition to INFO/WARNING/ERROR) +- `--quiet, -q` - Suppress non-essential output (only shows ERROR messages, hides INFO and WARNING) - `--help, -h` - Show help for a command +Output levels: + +| Flag | ERROR | WARNING | INFO | DEBUG | +| ----------- | ----- | ------- | ---- | ----- | +| `--quiet` | ✓ | | | | +| _(default)_ | ✓ | ✓ | ✓ | | +| `--verbose` | ✓ | ✓ | ✓ | ✓ | + ## SDK Usage The following code will upload an image and resize it to a thumbnail: diff --git a/src/Transloadit.ts b/src/Transloadit.ts index ed0f2ecc..bdf57c68 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -299,9 +299,12 @@ export class Transloadit { stream.pause() } - // If any stream emits error, we want to handle this and exit with error - // Note: We add a no-op catch to prevent unhandled rejection when createAssemblyAndUpload - // completes first and this promise is orphaned (but streams may still error later) + // If any stream emits error, we want to handle this and exit with error. + // This promise races against createAssemblyAndUpload() below via Promise.race(). + // When createAssemblyAndUpload wins the race, this promise becomes "orphaned" - + // it's no longer awaited, but stream error handlers remain attached. + // The no-op catch prevents Node's unhandled rejection warning if a stream + // errors after the race is already won. const streamErrorPromise = new Promise((_resolve, reject) => { for (const { stream } of allStreams) { stream.on('error', reject) diff --git a/src/cli/JobsPromise.ts b/src/cli/JobsPromise.ts index 4cc582b2..dbbfe703 100644 --- a/src/cli/JobsPromise.ts +++ b/src/cli/JobsPromise.ts @@ -19,12 +19,17 @@ export default class JobsPromise { /** * Add a promise to track. If the promise rejects, * the error handler will be called. 
+ * @throws Error if error handler has not been set via setErrorHandler() */ add(promise: Promise): void { + if (this.onError === null) { + throw new Error('JobsPromise: error handler must be set before adding promises') + } this.promises.add(promise) + const errorHandler = this.onError promise .catch((err: unknown) => { - this.onError?.(err) + errorHandler(err) }) .finally(() => { this.promises.delete(promise) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 3676f193..29db07be 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -20,6 +20,7 @@ interface NodeWatcher { on(event: 'close', listener: () => void): void on(event: 'change', listener: (evt: string, filename: string) => void): void on(event: string, listener: (...args: unknown[]) => void): void + close(): void } type NodeWatchFn = (path: string, options?: { recursive?: boolean }) => NodeWatcher @@ -334,12 +335,27 @@ class NullJobEmitter extends MyEventEmitter { } class WatchJobEmitter extends MyEventEmitter { + private watcher: NodeWatcher | null = null + constructor({ file, streamRegistry, recursive, outstreamProvider }: WatchJobEmitterOptions) { super() this.init({ file, streamRegistry, recursive, outstreamProvider }).catch((err) => { this.emit('error', err) }) + + // Clean up watcher on process exit signals + const cleanup = () => this.close() + process.once('SIGINT', cleanup) + process.once('SIGTERM', cleanup) + } + + /** Close the file watcher and release resources */ + close(): void { + if (this.watcher) { + this.watcher.close() + this.watcher = null + } } private async init({ @@ -352,11 +368,14 @@ class WatchJobEmitter extends MyEventEmitter { const topdir = stats.isDirectory() ? 
file : undefined const watchFn = await getNodeWatch() - const watcher = watchFn(file, { recursive }) + this.watcher = watchFn(file, { recursive }) - watcher.on('error', (err: Error) => this.emit('error', err)) - watcher.on('close', () => this.emit('end')) - watcher.on('change', (_evt: string, filename: string) => { + this.watcher.on('error', (err: Error) => { + this.close() + this.emit('error', err) + }) + this.watcher.on('close', () => this.emit('end')) + this.watcher.on('change', (_evt: string, filename: string) => { const normalizedFile = path.normalize(filename) this.handleChange(normalizedFile, topdir, streamRegistry, outstreamProvider).catch((err) => { this.emit('error', err) diff --git a/test/unit/cli/JobsPromise.test.ts b/test/unit/cli/JobsPromise.test.ts new file mode 100644 index 00000000..e7c73553 --- /dev/null +++ b/test/unit/cli/JobsPromise.test.ts @@ -0,0 +1,46 @@ +import { describe, expect, it, vi } from 'vitest' +import JobsPromise from '../../../src/cli/JobsPromise.ts' + +describe('JobsPromise', () => { + it('should call error handler when promise rejects', async () => { + const jobs = new JobsPromise() + const errorHandler = vi.fn() + jobs.setErrorHandler(errorHandler) + + const error = new Error('test error') + jobs.add(Promise.reject(error)) + + await jobs.allSettled() + + expect(errorHandler).toHaveBeenCalledWith(error) + }) + + it('should collect fulfilled values from allSettled', async () => { + const jobs = new JobsPromise() + jobs.setErrorHandler(() => {}) + + jobs.add(Promise.resolve('a')) + jobs.add(Promise.resolve('b')) + jobs.add(Promise.reject(new Error('ignored'))) + + const results = await jobs.allSettled() + + expect(results).toContain('a') + expect(results).toContain('b') + expect(results).toHaveLength(2) + }) + + it('should throw if error handler is not set and promise rejects', async () => { + const jobs = new JobsPromise() + // Intentionally NOT setting error handler + + // Create a promise that we'll handle to avoid unhandled 
rejection + const rejectingPromise = Promise.reject(new Error('test')) + rejectingPromise.catch(() => {}) // Prevent unhandled rejection warning + + // This should throw because no error handler is set + expect(() => jobs.add(rejectingPromise)).toThrow( + 'JobsPromise: error handler must be set before adding promises' + ) + }) +}) From 44aa6e3c8d0ab1b31499d017cd9f826d0d26580e Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 16:07:22 +0100 Subject: [PATCH 23/45] feat: replace --verbose/--quiet with --log-level (-l) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use syslog-style severity levels inspired by @transloadit/sev-logger: - err (3): Error conditions - warn (4): Warning conditions - notice (5): Normal but significant (default) - info (6): Informational messages - debug (7): Debug-level messages Example: `npx transloadit assemblies list -l debug` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- README.md | 29 +++++++++++++------ src/cli/OutputCtl.ts | 50 ++++++++++++++++++++++++++++----- src/cli/commands/BaseCommand.ts | 24 +++++++--------- test/e2e/cli/OutputCtl.ts | 13 ++++++--- 4 files changed, 83 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index 24dc1e1f..ef97f7fd 100644 --- a/README.md +++ b/README.md @@ -167,17 +167,30 @@ echo '{"workspace":"my-workspace","template":"my-template","input":"image.jpg"}' All commands support these common options: - `--json, -j` - Output results as JSON (useful for scripting) -- `--verbose, -v` - Enable debug output (shows DEBUG messages in addition to INFO/WARNING/ERROR) -- `--quiet, -q` - Suppress non-essential output (only shows ERROR messages, hides INFO and WARNING) +- `--log-level, -l` - Set log verbosity level (default: notice) - `--help, -h` - Show help for a command -Output levels: +#### Log Levels -| Flag | ERROR | WARNING | INFO | DEBUG | -| ----------- | ----- | ------- | ---- | ----- 
| -| `--quiet` | ✓ | | | | -| _(default)_ | ✓ | ✓ | ✓ | | -| `--verbose` | ✓ | ✓ | ✓ | ✓ | +The CLI uses [syslog severity levels](https://en.wikipedia.org/wiki/Syslog#Severity_level). Lower = more severe, higher = more verbose: + +| Level | Value | Description | +| -------- | ----- | ------------------------------------- | +| `err` | 3 | Error conditions | +| `warn` | 4 | Warning conditions | +| `notice` | 5 | Normal but significant **(default)** | +| `info` | 6 | Informational messages | +| `debug` | 7 | Debug-level messages | + +Examples: + +```bash +# Show only errors and warnings +npx transloadit assemblies list -l warn + +# Show debug output +npx transloadit assemblies list -l debug +``` ## SDK Usage diff --git a/src/cli/OutputCtl.ts b/src/cli/OutputCtl.ts index 5ff9ef19..bd2383b0 100644 --- a/src/cli/OutputCtl.ts +++ b/src/cli/OutputCtl.ts @@ -1,5 +1,36 @@ +/** + * Log levels following syslog severity (https://en.wikipedia.org/wiki/Syslog#Severity_level) + * Lower numbers = more severe, higher numbers = more verbose + */ +export const LOG_LEVEL = { + ERR: 3, // Error conditions + WARN: 4, // Warning conditions + NOTICE: 5, // Normal but significant (default) + INFO: 6, // Informational + DEBUG: 7, // Debug-level messages +} as const + +export type LogLevelName = keyof typeof LOG_LEVEL +export type LogLevelValue = (typeof LOG_LEVEL)[LogLevelName] + +export const LOG_LEVEL_DEFAULT: LogLevelValue = LOG_LEVEL.NOTICE + +/** Valid log level names for CLI parsing */ +export const LOG_LEVEL_NAMES = Object.keys(LOG_LEVEL).map((k) => k.toLowerCase()) as Lowercase< + LogLevelName +>[] + +/** Parse a log level string to its numeric value */ +export function parseLogLevel(level: string): LogLevelValue { + const upper = level.toUpperCase() as LogLevelName + if (upper in LOG_LEVEL) { + return LOG_LEVEL[upper] + } + throw new Error(`Invalid log level: ${level}. 
Valid levels: ${LOG_LEVEL_NAMES.join(', ')}`) +} + export interface OutputCtlOptions { - logLevel?: number + logLevel?: LogLevelValue jsonMode?: boolean } @@ -7,6 +38,7 @@ export interface OutputCtlOptions { export interface IOutputCtl { error(msg: unknown): void warn(msg: unknown): void + notice(msg: unknown): void info(msg: unknown): void debug(msg: unknown): void print(simple: unknown, json: unknown): void @@ -14,9 +46,9 @@ export interface IOutputCtl { export default class OutputCtl implements IOutputCtl { private json: boolean - private logLevel: number + private logLevel: LogLevelValue - constructor({ logLevel = 0, jsonMode = false }: OutputCtlOptions = {}) { + constructor({ logLevel = LOG_LEVEL_DEFAULT, jsonMode = false }: OutputCtlOptions = {}) { this.json = jsonMode this.logLevel = logLevel @@ -33,19 +65,23 @@ export default class OutputCtl implements IOutputCtl { } error(msg: unknown): void { - console.error('ERROR ', msg) + if (this.logLevel >= LOG_LEVEL.ERR) console.error('err ', msg) } warn(msg: unknown): void { - if (this.logLevel > 0) console.error('WARNING', msg) + if (this.logLevel >= LOG_LEVEL.WARN) console.error('warn ', msg) + } + + notice(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.NOTICE) console.error('notice ', msg) } info(msg: unknown): void { - if (this.logLevel > 0) console.error('INFO ', msg) + if (this.logLevel >= LOG_LEVEL.INFO) console.error('info ', msg) } debug(msg: unknown): void { - if (this.logLevel > 1) console.error('DEBUG ', msg) + if (this.logLevel >= LOG_LEVEL.DEBUG) console.error('debug ', msg) } print(simple: unknown, json: unknown): void { diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts index 0d1718fe..6f0f1e2d 100644 --- a/src/cli/commands/BaseCommand.ts +++ b/src/cli/commands/BaseCommand.ts @@ -2,15 +2,16 @@ import process from 'node:process' import { Command, Option } from 'clipanion' import 'dotenv/config' import { Transloadit as TransloaditClient } from '../../Transloadit.ts' 
-import OutputCtl, { type IOutputCtl } from '../OutputCtl.ts' +import OutputCtl, { + type IOutputCtl, + LOG_LEVEL_DEFAULT, + LOG_LEVEL_NAMES, + parseLogLevel, +} from '../OutputCtl.ts' export abstract class BaseCommand extends Command { - verbose = Option.Boolean('-v,--verbose', false, { - description: 'Enable debug output', - }) - - quiet = Option.Boolean('-q,--quiet', false, { - description: 'Disable warnings', + logLevelOption = Option.String('-l,--log-level', { + description: `Log level: ${LOG_LEVEL_NAMES.join(', ')} (default: notice)`, }) json = Option.Boolean('-j,--json', false, { @@ -20,15 +21,10 @@ export abstract class BaseCommand extends Command { protected output!: IOutputCtl protected client!: TransloaditClient - protected get logLevel(): number { - if (this.verbose) return 2 - if (this.quiet) return 0 - return 1 - } - protected setupOutput(): void { + const logLevel = this.logLevelOption ? parseLogLevel(this.logLevelOption) : LOG_LEVEL_DEFAULT this.output = new OutputCtl({ - logLevel: this.logLevel, + logLevel, jsonMode: this.json, }) } diff --git a/test/e2e/cli/OutputCtl.ts b/test/e2e/cli/OutputCtl.ts index d63cbde5..44bb8b85 100644 --- a/test/e2e/cli/OutputCtl.ts +++ b/test/e2e/cli/OutputCtl.ts @@ -1,7 +1,8 @@ -import type { OutputCtlOptions } from '../../../src/cli/OutputCtl.ts' +import type { LogLevelValue, OutputCtlOptions } from '../../../src/cli/OutputCtl.ts' +import { LOG_LEVEL_DEFAULT } from '../../../src/cli/OutputCtl.ts' interface OutputEntry { - type: 'error' | 'warn' | 'info' | 'debug' | 'print' + type: 'error' | 'warn' | 'notice' | 'info' | 'debug' | 'print' msg: unknown json?: unknown } @@ -14,9 +15,9 @@ export default class OutputCtl { private output: OutputEntry[] // These properties are required by the src/cli/OutputCtl interface but not used in tests private json: boolean - private logLevel: number + private logLevel: LogLevelValue - constructor({ logLevel = 0, jsonMode = false }: OutputCtlOptions = {}) { + constructor({ logLevel = 
LOG_LEVEL_DEFAULT, jsonMode = false }: OutputCtlOptions = {}) { this.output = [] this.json = jsonMode this.logLevel = logLevel @@ -30,6 +31,10 @@ export default class OutputCtl { this.output.push({ type: 'warn', msg }) } + notice(msg: unknown): void { + this.output.push({ type: 'notice', msg }) + } + info(msg: unknown): void { this.output.push({ type: 'info', msg }) } From 0ca0f87325a7e59d737a684bd8386351cd3710da Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 16:23:12 +0100 Subject: [PATCH 24/45] feat: allow numeric values for --log-level MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Now accepts both level names and numeric values: -l warn OR -l 4 -l debug OR -l 7 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- README.md | 6 ++++-- src/cli/OutputCtl.ts | 21 +++++++++++++++++++-- src/cli/commands/BaseCommand.ts | 2 +- 3 files changed, 24 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index ef97f7fd..11f9f6b6 100644 --- a/README.md +++ b/README.md @@ -167,7 +167,7 @@ echo '{"workspace":"my-workspace","template":"my-template","input":"image.jpg"}' All commands support these common options: - `--json, -j` - Output results as JSON (useful for scripting) -- `--log-level, -l` - Set log verbosity level (default: notice) +- `--log-level, -l` - Set log verbosity level by name or number (default: notice) - `--help, -h` - Show help for a command #### Log Levels @@ -182,14 +182,16 @@ The CLI uses [syslog severity levels](https://en.wikipedia.org/wiki/Syslog#Sever | `info` | 6 | Informational messages | | `debug` | 7 | Debug-level messages | -Examples: +You can use either the level name or its numeric value: ```bash # Show only errors and warnings npx transloadit assemblies list -l warn +npx transloadit assemblies list -l 4 # Show debug output npx transloadit assemblies list -l debug +npx transloadit assemblies list -l 7 ``` ## SDK Usage diff --git 
a/src/cli/OutputCtl.ts b/src/cli/OutputCtl.ts index bd2383b0..6cd9e25b 100644 --- a/src/cli/OutputCtl.ts +++ b/src/cli/OutputCtl.ts @@ -20,13 +20,30 @@ export const LOG_LEVEL_NAMES = Object.keys(LOG_LEVEL).map((k) => k.toLowerCase() LogLevelName >[] -/** Parse a log level string to its numeric value */ +/** Valid numeric log level values */ +const LOG_LEVEL_VALUES = new Set(Object.values(LOG_LEVEL)) + +/** Parse a log level string (name or number) to its numeric value */ export function parseLogLevel(level: string): LogLevelValue { + // Try parsing as number first + const num = Number(level) + if (!Number.isNaN(num)) { + if (LOG_LEVEL_VALUES.has(num as LogLevelValue)) { + return num as LogLevelValue + } + throw new Error( + `Invalid log level: ${level}. Valid values: ${[...LOG_LEVEL_VALUES].join(', ')} or ${LOG_LEVEL_NAMES.join(', ')}`, + ) + } + + // Try as level name const upper = level.toUpperCase() as LogLevelName if (upper in LOG_LEVEL) { return LOG_LEVEL[upper] } - throw new Error(`Invalid log level: ${level}. Valid levels: ${LOG_LEVEL_NAMES.join(', ')}`) + throw new Error( + `Invalid log level: ${level}. 
Valid levels: ${LOG_LEVEL_NAMES.join(', ')} or ${[...LOG_LEVEL_VALUES].join(', ')}`, + ) } export interface OutputCtlOptions { diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts index 6f0f1e2d..51f83cbc 100644 --- a/src/cli/commands/BaseCommand.ts +++ b/src/cli/commands/BaseCommand.ts @@ -11,7 +11,7 @@ import OutputCtl, { export abstract class BaseCommand extends Command { logLevelOption = Option.String('-l,--log-level', { - description: `Log level: ${LOG_LEVEL_NAMES.join(', ')} (default: notice)`, + description: `Log level: ${LOG_LEVEL_NAMES.join(', ')} or 3-7 (default: notice)`, }) json = Option.Boolean('-j,--json', false, { From 887ba601038a898af3dc17950ec4423248094594 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 16:25:01 +0100 Subject: [PATCH 25/45] feat: add trace log level (8) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Most verbose level for detailed tracing, matching sev-logger. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- README.md | 1 + src/cli/OutputCtl.ts | 6 ++++++ src/cli/commands/BaseCommand.ts | 2 +- test/e2e/cli/OutputCtl.ts | 6 +++++- 4 files changed, 13 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 11f9f6b6..a92ef3a9 100644 --- a/README.md +++ b/README.md @@ -181,6 +181,7 @@ The CLI uses [syslog severity levels](https://en.wikipedia.org/wiki/Syslog#Sever | `notice` | 5 | Normal but significant **(default)** | | `info` | 6 | Informational messages | | `debug` | 7 | Debug-level messages | +| `trace` | 8 | Most verbose/detailed | You can use either the level name or its numeric value: diff --git a/src/cli/OutputCtl.ts b/src/cli/OutputCtl.ts index 6cd9e25b..cf460e2e 100644 --- a/src/cli/OutputCtl.ts +++ b/src/cli/OutputCtl.ts @@ -8,6 +8,7 @@ export const LOG_LEVEL = { NOTICE: 5, // Normal but significant (default) INFO: 6, // Informational DEBUG: 7, // Debug-level messages + TRACE: 8, // Most verbose/detailed } as const export type LogLevelName = keyof typeof LOG_LEVEL @@ -58,6 +59,7 @@ export interface IOutputCtl { notice(msg: unknown): void info(msg: unknown): void debug(msg: unknown): void + trace(msg: unknown): void print(simple: unknown, json: unknown): void } @@ -101,6 +103,10 @@ export default class OutputCtl implements IOutputCtl { if (this.logLevel >= LOG_LEVEL.DEBUG) console.error('debug ', msg) } + trace(msg: unknown): void { + if (this.logLevel >= LOG_LEVEL.TRACE) console.error('trace ', msg) + } + print(simple: unknown, json: unknown): void { if (this.json) console.log(JSON.stringify(json)) else if (typeof simple === 'string') console.log(simple) diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts index 51f83cbc..78b2f08a 100644 --- a/src/cli/commands/BaseCommand.ts +++ b/src/cli/commands/BaseCommand.ts @@ -11,7 +11,7 @@ import OutputCtl, { export abstract class BaseCommand extends Command { logLevelOption = 
Option.String('-l,--log-level', { - description: `Log level: ${LOG_LEVEL_NAMES.join(', ')} or 3-7 (default: notice)`, + description: `Log level: ${LOG_LEVEL_NAMES.join(', ')} or 3-8 (default: notice)`, }) json = Option.Boolean('-j,--json', false, { diff --git a/test/e2e/cli/OutputCtl.ts b/test/e2e/cli/OutputCtl.ts index 44bb8b85..1962c541 100644 --- a/test/e2e/cli/OutputCtl.ts +++ b/test/e2e/cli/OutputCtl.ts @@ -2,7 +2,7 @@ import type { LogLevelValue, OutputCtlOptions } from '../../../src/cli/OutputCtl import { LOG_LEVEL_DEFAULT } from '../../../src/cli/OutputCtl.ts' interface OutputEntry { - type: 'error' | 'warn' | 'notice' | 'info' | 'debug' | 'print' + type: 'error' | 'warn' | 'notice' | 'info' | 'debug' | 'trace' | 'print' msg: unknown json?: unknown } @@ -43,6 +43,10 @@ export default class OutputCtl { this.output.push({ type: 'debug', msg }) } + trace(msg: unknown): void { + this.output.push({ type: 'trace', msg }) + } + print(msg: unknown, json?: unknown): void { this.output.push({ type: 'print', msg, json }) } From 0f795cb8d3fb55e7277e0f8e2722dbc7ca758cf5 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 16:55:52 +0100 Subject: [PATCH 26/45] add onPoll --- CHANGELOG.md | 4 +++- README.md | 2 ++ src/Transloadit.ts | 15 +++++++++++++++ src/cli/assemblies-create.ts | 37 ++++++++++++------------------------ test/unit/mock-http.test.ts | 26 +++++++++++++++++++++++++ 5 files changed, 58 insertions(+), 26 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 83546b2d..0851a04f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,9 @@ You may also want to refer to [GitHub releases](https://github.com/transloadit/n ## Unreleased - Add `signal` option to `createAssembly()` for cancelling in-flight HTTP requests and TUS uploads via `AbortController` -- Integrate transloadify CLI into the SDK, providing `assemblies`, `templates`, and `bills` commands +- Add `signal` and `onPoll` options to `awaitAssemblyCompletion()` for cancellation 
and custom polling control +- Integrate transloadify CLI into the SDK, providing `assemblies`, `templates`, `bills`, and `assembly-notifications` commands +- Add `--log-level (-l)` CLI option using syslog severity levels (err=3, warn=4, notice=5, info=6, debug=7, trace=8) - Apply stricter biome lint rules (noExplicitAny, useAwait, noForEach, noNonNullAssertion) ## v4.0.7 diff --git a/README.md b/README.md index a92ef3a9..99047e56 100644 --- a/README.md +++ b/README.md @@ -457,6 +457,8 @@ This function will continously poll the specified Assembly `assemblyId` and reso - `onAssemblyProgress` - A progress function called on each poll. See `createAssembly` - `timeout` - How many milliseconds until polling times out (default: no timeout) - `interval` - Poll interval in milliseconds (default `1000`) +- `signal` - An `AbortSignal` to cancel polling. When aborted, the promise rejects with an `AbortError`. +- `onPoll` - A callback invoked at the start of each poll iteration. Return `false` to stop polling early and resolve with the last known status. Useful for implementing custom cancellation logic (e.g., superseding assemblies in watch mode). #### getLastUsedAssemblyUrl() diff --git a/src/Transloadit.ts b/src/Transloadit.ts index bdf57c68..5cce9623 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -107,6 +107,12 @@ export interface AwaitAssemblyCompletionOptions { * When aborted, the polling loop will stop and throw an AbortError. */ signal?: AbortSignal + /** + * Optional callback invoked before each poll iteration. + * Return `false` to stop polling early and return the current assembly status. + * Useful for watch mode where a newer job may supersede the current one. 
+ */ + onPoll?: () => boolean | void } export interface SmartCDNUrlOptions { @@ -368,17 +374,26 @@ export class Transloadit { startTimeMs = getHrTimeMs(), interval = 1000, signal, + onPoll, }: AwaitAssemblyCompletionOptions = {}, ): Promise { assert.ok(assemblyId) + let lastResult: AssemblyStatus | undefined + while (true) { + // Check if caller wants to stop polling early + if (onPoll?.() === false && lastResult) { + return lastResult + } + // Check if aborted before making the request if (signal?.aborted) { throw signal.reason ?? new DOMException('Aborted', 'AbortError') } const result = await this.getAssembly(assemblyId, { signal }) + lastResult = result // If 'ok' is not in result, it implies a terminal state (e.g., error, completed, canceled). // If 'ok' is present, then we check if it's one of the non-terminal polling states. diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 29db07be..f5b45f6b 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -724,34 +724,21 @@ export default async function run( const assemblyId = result.assembly_id if (!assemblyId) throw new Error('No assembly_id in result') - let assembly: AssemblyStatus = await client.getAssembly(assemblyId, { + // Use SDK's awaitAssemblyCompletion with onPoll to check for superceded jobs + const assembly = await client.awaitAssemblyCompletion(assemblyId, { signal: abortController.signal, + onPoll: () => { + // Return false to stop polling if this job has been superceded (watch mode) + if (superceded) return false + return true + }, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, }) - while ( - assembly.ok !== 'ASSEMBLY_COMPLETED' && - assembly.ok !== 'ASSEMBLY_CANCELED' && - !assembly.error - ) { - if (superceded) return - if (abortController.signal.aborted) { - throw abortController.signal.reason || new Error('Aborted') - } - - outputctl.debug(`Assembly status: ${assembly.ok}`) - await new 
Promise((resolve, reject) => { - const timer = setTimeout(resolve, 1000) - abortController.signal.addEventListener( - 'abort', - () => { - clearTimeout(timer) - reject(abortController.signal.reason || new Error('Aborted')) - }, - { once: true }, - ) - }) - assembly = await client.getAssembly(assemblyId, { signal: abortController.signal }) - } + // If superceded, exit early without processing results + if (superceded) return if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` diff --git a/test/unit/mock-http.test.ts b/test/unit/mock-http.test.ts index 65b85b63..9f572c18 100644 --- a/test/unit/mock-http.test.ts +++ b/test/unit/mock-http.test.ts @@ -76,6 +76,32 @@ describe('Mocked API tests', () => { scope.persist(false) }) + it('should stop polling early when onPoll returns false', async () => { + const client = getLocalClient() + + let pollCount = 0 + const scope = nock('http://localhost') + .get('/assemblies/1') + .query(() => true) + .reply(200, { ok: 'ASSEMBLY_EXECUTING', assembly_url: '', assembly_ssl_url: '' }) + .persist() + + const result = await client.awaitAssemblyCompletion('1', { + interval: 10, + onPoll: () => { + pollCount++ + // Stop after 3 polls + return pollCount < 3 + }, + }) + + // Should have the last polled status (ASSEMBLY_EXECUTING), not completed + expect((result as { ok: string }).ok).toBe('ASSEMBLY_EXECUTING') + expect(pollCount).toBe(3) + + scope.persist(false) + }) + it('should handle aborted correctly', async () => { const client = getLocalClient() From 8c5ee5d7535b1a0221de82925964303d3c3814dd Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 17:01:23 +0100 Subject: [PATCH 27/45] fix --- src/cli/assemblies-create.ts | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index f5b45f6b..ba58aeee 100644 --- 
a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -484,6 +484,8 @@ function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { const emitter = new MyEventEmitter() const jobsPromise = new JobsPromise() + // Errors are already caught in the promises passed to add(), so use a no-op handler + jobsPromise.setErrorHandler(() => {}) jobEmitter.on('end', () => jobsPromise.allSettled().then(() => emitter.emit('end'))) jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) @@ -697,6 +699,11 @@ export default async function run( // AbortController to cancel all in-flight createAssembly calls when an error occurs const abortController = new AbortController() + // Set error handler before subscribing to events that might call add() + jobsPromise.setErrorHandler((err: unknown) => { + outputctl.error(err as Error) + }) + emitter.on('job', (job: Job) => { activeJobs.add(job) const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined @@ -796,10 +803,6 @@ export default async function run( } }) - jobsPromise.setErrorHandler((err: unknown) => { - outputctl.error(err as Error) - }) - emitter.on('error', (err: Error) => { // Abort all in-flight createAssembly calls to ensure clean shutdown abortController.abort() From ce864581f4ce6c94c4119fd1033f89aa9a724106 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 17:26:50 +0100 Subject: [PATCH 28/45] format --- .gemini/settings.json | 7 ++----- src/Transloadit.ts | 2 +- src/cli/OutputCtl.ts | 6 +++--- src/cli/assemblies-create.ts | 2 +- src/cli/commands/BaseCommand.ts | 8 ++------ test/unit/cli/JobsPromise.test.ts | 4 ++-- 6 files changed, 11 insertions(+), 18 deletions(-) diff --git a/.gemini/settings.json b/.gemini/settings.json index 2704fdc3..20d34743 100644 --- a/.gemini/settings.json +++ b/.gemini/settings.json @@ -2,10 +2,7 @@ "mcpServers": { "playwright": { "command": "npx", - "args": [ - "-y", - "@playwright/mcp@latest" - ] + "args": 
["-y", "@playwright/mcp@latest"] } } -} \ No newline at end of file +} diff --git a/src/Transloadit.ts b/src/Transloadit.ts index 5cce9623..4d2c2689 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -112,7 +112,7 @@ export interface AwaitAssemblyCompletionOptions { * Return `false` to stop polling early and return the current assembly status. * Useful for watch mode where a newer job may supersede the current one. */ - onPoll?: () => boolean | void + onPoll?: () => boolean | undefined } export interface SmartCDNUrlOptions { diff --git a/src/cli/OutputCtl.ts b/src/cli/OutputCtl.ts index cf460e2e..144e4a4c 100644 --- a/src/cli/OutputCtl.ts +++ b/src/cli/OutputCtl.ts @@ -17,9 +17,9 @@ export type LogLevelValue = (typeof LOG_LEVEL)[LogLevelName] export const LOG_LEVEL_DEFAULT: LogLevelValue = LOG_LEVEL.NOTICE /** Valid log level names for CLI parsing */ -export const LOG_LEVEL_NAMES = Object.keys(LOG_LEVEL).map((k) => k.toLowerCase()) as Lowercase< - LogLevelName ->[] +export const LOG_LEVEL_NAMES = Object.keys(LOG_LEVEL).map((k) => + k.toLowerCase(), +) as Lowercase[] /** Valid numeric log level values */ const LOG_LEVEL_VALUES = new Set(Object.values(LOG_LEVEL)) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index ba58aeee..c841f072 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -10,7 +10,7 @@ import tty from 'node:tty' import { promisify } from 'node:util' import type { StepsInput } from '../alphalib/types/template.ts' import type { CreateAssemblyParams } from '../apiTypes.ts' -import type { AssemblyStatus, CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' +import type { CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' import JobsPromise from './JobsPromise.ts' import type { IOutputCtl } from './OutputCtl.ts' import { isErrnoException } from './types.ts' diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts index 78b2f08a..2e7bf009 100644 --- 
a/src/cli/commands/BaseCommand.ts +++ b/src/cli/commands/BaseCommand.ts @@ -2,12 +2,8 @@ import process from 'node:process' import { Command, Option } from 'clipanion' import 'dotenv/config' import { Transloadit as TransloaditClient } from '../../Transloadit.ts' -import OutputCtl, { - type IOutputCtl, - LOG_LEVEL_DEFAULT, - LOG_LEVEL_NAMES, - parseLogLevel, -} from '../OutputCtl.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import OutputCtl, { LOG_LEVEL_DEFAULT, LOG_LEVEL_NAMES, parseLogLevel } from '../OutputCtl.ts' export abstract class BaseCommand extends Command { logLevelOption = Option.String('-l,--log-level', { diff --git a/test/unit/cli/JobsPromise.test.ts b/test/unit/cli/JobsPromise.test.ts index e7c73553..41f15449 100644 --- a/test/unit/cli/JobsPromise.test.ts +++ b/test/unit/cli/JobsPromise.test.ts @@ -30,7 +30,7 @@ describe('JobsPromise', () => { expect(results).toHaveLength(2) }) - it('should throw if error handler is not set and promise rejects', async () => { + it('should throw if error handler is not set and promise rejects', () => { const jobs = new JobsPromise() // Intentionally NOT setting error handler @@ -40,7 +40,7 @@ describe('JobsPromise', () => { // This should throw because no error handler is set expect(() => jobs.add(rejectingPromise)).toThrow( - 'JobsPromise: error handler must be set before adding promises' + 'JobsPromise: error handler must be set before adding promises', ) }) }) From b62dac6a70ee8bdf8da051bf9d9745683f6e686b Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 17:36:04 +0100 Subject: [PATCH 29/45] fix: CLI exits with code 1 when jobs fail, fix AbortSignal listener leak MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add hasFailures tracking to JobsPromise so CLI can detect failures - CLI assemblies create now returns exit code 1 when any job fails - Fix memory leak in awaitAssemblyCompletion: remove abort listener when sleep timeout resolves 
normally - Update CHANGELOG with better onPoll description 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- CHANGELOG.md | 2 +- src/Transloadit.ts | 22 ++++++++++++---------- src/cli/JobsPromise.ts | 10 ++++++++++ src/cli/assemblies-create.ts | 7 ++++--- src/cli/commands/assemblies.ts | 4 ++-- test/unit/cli/JobsPromise.test.ts | 26 ++++++++++++++++++++++++++ 6 files changed, 55 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0851a04f..7fe50cf7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ You may also want to refer to [GitHub releases](https://github.com/transloadit/n ## Unreleased - Add `signal` option to `createAssembly()` for cancelling in-flight HTTP requests and TUS uploads via `AbortController` -- Add `signal` and `onPoll` options to `awaitAssemblyCompletion()` for cancellation and custom polling control +- Add `signal` and `onPoll` options to `awaitAssemblyCompletion()` for cancellation and early termination (useful for custom progress reporting or superseding assemblies in watch mode) - Integrate transloadify CLI into the SDK, providing `assemblies`, `templates`, `bills`, and `assembly-notifications` commands - Add `--log-level (-l)` CLI option using syslog severity levels (err=3, warn=4, notice=5, info=6, debug=7, trace=8) - Apply stricter biome lint rules (noExplicitAny, useAwait, noForEach, noNonNullAssertion) diff --git a/src/Transloadit.ts b/src/Transloadit.ts index 4d2c2689..f360c438 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -422,17 +422,19 @@ export class Transloadit { throw new PollingTimeoutError('Polling timed out') } - // Make the sleep abortable + // Make the sleep abortable, ensuring listener cleanup to prevent memory leaks await new Promise((resolve, reject) => { - const timeoutId = setTimeout(resolve, interval) - signal?.addEventListener( - 'abort', - () => { - clearTimeout(timeoutId) - reject(signal.reason ?? 
new DOMException('Aborted', 'AbortError')) - }, - { once: true }, - ) + const timeoutId = setTimeout(() => { + signal?.removeEventListener('abort', onAbort) + resolve() + }, interval) + + function onAbort() { + clearTimeout(timeoutId) + reject(signal?.reason ?? new DOMException('Aborted', 'AbortError')) + } + + signal?.addEventListener('abort', onAbort, { once: true }) }) } } diff --git a/src/cli/JobsPromise.ts b/src/cli/JobsPromise.ts index dbbfe703..9ecafca4 100644 --- a/src/cli/JobsPromise.ts +++ b/src/cli/JobsPromise.ts @@ -3,10 +3,12 @@ * Used to run multiple async operations in parallel while: * 1. Reporting errors as they happen (via onError callback) * 2. Waiting for all operations to complete at the end + * 3. Tracking whether any failures occurred */ export default class JobsPromise { private promises: Set> = new Set() private onError: ((err: unknown) => void) | null = null + private _hasFailures = false /** * Set the error handler for individual promise rejections. @@ -29,6 +31,7 @@ export default class JobsPromise { const errorHandler = this.onError promise .catch((err: unknown) => { + this._hasFailures = true errorHandler(err) }) .finally(() => { @@ -36,6 +39,13 @@ export default class JobsPromise { }) } + /** + * Returns true if any tracked promise has rejected. + */ + get hasFailures(): boolean { + return this._hasFailures + } + /** * Wait for all tracked promises to settle. 
* Returns array of fulfilled values (rejects are filtered out diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index c841f072..83e1e20a 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -615,7 +615,7 @@ export default async function run( del, reprocessStale, }: AssembliesCreateOptions, -): Promise { +): Promise<{ results: unknown[]; hasFailures: boolean }> { // Quick fix for https://github.com/transloadit/transloadify/issues/13 // Only default to stdout when output is undefined (not provided), not when explicitly null let resolvedOutput = output @@ -811,8 +811,9 @@ export default async function run( reject(err) }) - emitter.on('end', () => { - resolve(jobsPromise.allSettled()) + emitter.on('end', async () => { + const results = await jobsPromise.allSettled() + resolve({ results, hasFailures: jobsPromise.hasFailures }) }) }) } diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts index cb569426..092a324d 100644 --- a/src/cli/commands/assemblies.ts +++ b/src/cli/commands/assemblies.ts @@ -104,7 +104,7 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { fieldsMap[key] = value } - await assembliesCreate(this.output, this.client, { + const { hasFailures } = await assembliesCreate(this.output, this.client, { steps: this.steps, template: this.template, fields: fieldsMap, @@ -115,7 +115,7 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { del: this.deleteAfterProcessing, reprocessStale: this.reprocessStale, }) - return undefined + return hasFailures ? 
1 : undefined } } diff --git a/test/unit/cli/JobsPromise.test.ts b/test/unit/cli/JobsPromise.test.ts index 41f15449..d86d449c 100644 --- a/test/unit/cli/JobsPromise.test.ts +++ b/test/unit/cli/JobsPromise.test.ts @@ -43,4 +43,30 @@ describe('JobsPromise', () => { 'JobsPromise: error handler must be set before adding promises', ) }) + + it('should track hasFailures when promise rejects', async () => { + const jobs = new JobsPromise() + jobs.setErrorHandler(() => {}) + + expect(jobs.hasFailures).toBe(false) + + jobs.add(Promise.resolve('ok')) + jobs.add(Promise.reject(new Error('fail'))) + + await jobs.allSettled() + + expect(jobs.hasFailures).toBe(true) + }) + + it('should have hasFailures false when all succeed', async () => { + const jobs = new JobsPromise() + jobs.setErrorHandler(() => {}) + + jobs.add(Promise.resolve('a')) + jobs.add(Promise.resolve('b')) + + await jobs.allSettled() + + expect(jobs.hasFailures).toBe(false) + }) }) From f22b4a170fd2285d109c1e3a253b7e23a9394332 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 18:22:37 +0100 Subject: [PATCH 30/45] feat(cli): add --endpoint option and --single-assembly flag MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add --endpoint option to all commands for custom API endpoint (also reads from TRANSLOADIT_ENDPOINT env var) - Add --single-assembly flag to assemblies create command Passes all input files to a single assembly instead of creating one assembly per file. Cannot be used with --watch. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/cli/assemblies-create.ts | 311 ++++++++++++++++++++++---------- src/cli/commands/BaseCommand.ts | 8 + src/cli/commands/assemblies.ts | 10 + 3 files changed, 235 insertions(+), 94 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 83e1e20a..5da8f171 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -599,6 +599,7 @@ export interface AssembliesCreateOptions { output?: string | null del?: boolean reprocessStale?: boolean + singleAssembly?: boolean } export default async function run( @@ -614,6 +615,7 @@ export default async function run( output, del, reprocessStale, + singleAssembly, }: AssembliesCreateOptions, ): Promise<{ results: unknown[]; hasFailures: boolean }> { // Quick fix for https://github.com/transloadit/transloadify/issues/13 @@ -704,116 +706,237 @@ export default async function run( outputctl.error(err as Error) }) - emitter.on('job', (job: Job) => { - activeJobs.add(job) - const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined - const outPath = job.out?.path - outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + if (singleAssembly) { + // Single-assembly mode: collect all jobs, then create one assembly with all inputs + const collectedJobs: Job[] = [] - let superceded = false - if (job.out != null) - job.out.on('finish', () => { - superceded = true - }) + emitter.on('job', (job: Job) => { + const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined + outputctl.debug(`COLLECTING JOB ${inPath ?? 
'null'}`) + collectedJobs.push(job) + }) - const createOptions: CreateAssemblyOptions = { - params, - signal: abortController.signal, - } - if (job.in != null) { - createOptions.uploads = { in: job.in } - } + emitter.on('error', (err: Error) => { + abortController.abort() + outputctl.error(err) + reject(err) + }) - const jobPromise = (async () => { - const result = await client.createAssembly(createOptions) - if (superceded) return + emitter.on('end', async () => { + if (collectedJobs.length === 0) { + resolve({ results: [], hasFailures: false }) + return + } - const assemblyId = result.assembly_id - if (!assemblyId) throw new Error('No assembly_id in result') + // Build uploads object with all input files + const uploads: Record = {} + const inputPaths: string[] = [] + for (const job of collectedJobs) { + if (job.in != null) { + const inPath = (job.in as fs.ReadStream).path as string + const basename = path.basename(inPath) + // Use a unique key if there are name collisions + let key = basename + let counter = 1 + while (key in uploads) { + key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` + counter++ + } + uploads[key] = job.in + inputPaths.push(inPath) + } + } - // Use SDK's awaitAssemblyCompletion with onPoll to check for superceded jobs - const assembly = await client.awaitAssemblyCompletion(assemblyId, { - signal: abortController.signal, - onPoll: () => { - // Return false to stop polling if this job has been superceded (watch mode) - if (superceded) return false - return true - }, - onAssemblyProgress: (status) => { - outputctl.debug(`Assembly status: ${status.ok}`) - }, - }) + outputctl.debug(`Creating single assembly with ${Object.keys(uploads).length} files`) + + const singleAssemblyPromise = (async () => { + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (Object.keys(uploads).length > 0) { + createOptions.uploads = uploads + } + + const result = await 
client.createAssembly(createOptions) + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const assembly = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) - // If superceded, exit early without processing results - if (superceded) return + if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + // Download all results + if (assembly.results && resolvedOutput != null) { + for (const [stepName, stepResults] of Object.entries(assembly.results)) { + for (const stepResult of stepResults) { + const resultUrl = stepResult.url + if (!resultUrl) continue + + // Determine output path + let outPath: string + if (outstat?.isDirectory()) { + outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) + } else { + outPath = resolvedOutput + } + + outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) + await new Promise((dlResolve, dlReject) => { + const get = resultUrl.startsWith('https') ? 
https.get : http.get + get(resultUrl, (res) => { + if (res.statusCode !== 200) { + const msg = `Server returned http status ${res.statusCode}` + outputctl.error(msg) + return dlReject(new Error(msg)) + } + const outStream = fs.createWriteStream(outPath) + res.pipe(outStream) + outStream.on('finish', () => dlResolve()) + outStream.on('error', dlReject) + }).on('error', (err) => { + outputctl.error(err.message) + dlReject(err) + }) + }) + } + } + } + + // Delete input files if requested + if (del) { + for (const inPath of inputPaths) { + await fsp.unlink(inPath) + } + } + })() + + jobsPromise.add(singleAssemblyPromise) + const results = await jobsPromise.allSettled() + resolve({ results, hasFailures: jobsPromise.hasFailures }) + }) + } else { + // Default mode: one assembly per file + emitter.on('job', (job: Job) => { + activeJobs.add(job) + const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined + const outPath = job.out?.path + outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) - if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { - const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` - outputctl.error(msg) - throw new Error(msg) - } + let superceded = false + if (job.out != null) + job.out.on('finish', () => { + superceded = true + }) - if (!assembly.results) throw new Error('No results in assembly') - const resultsKeys = Object.keys(assembly.results) - const firstKey = resultsKeys[0] - if (!firstKey) throw new Error('No results in assembly') - const firstResult = assembly.results[firstKey] - if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') - const resulturl = firstResult[0].url - - if (job.out != null && resulturl) { - outputctl.debug('DOWNLOADING') - await new Promise((resolve, reject) => { - const get = resulturl.startsWith('https') ? 
https.get : http.get - get(resulturl, (res) => { - if (res.statusCode !== 200) { - const msg = `Server returned http status ${res.statusCode}` - outputctl.error(msg) - return reject(new Error(msg)) - } + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (job.in != null) { + createOptions.uploads = { in: job.in } + } - if (superceded) return resolve() + const jobPromise = (async () => { + const result = await client.createAssembly(createOptions) + if (superceded) return + + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + // Use SDK's awaitAssemblyCompletion with onPoll to check for superceded jobs + const assembly = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onPoll: () => { + // Return false to stop polling if this job has been superceded (watch mode) + if (superceded) return false + return true + }, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) - if (!job.out) { - return reject(new Error('Job output stream is undefined')) - } - res.pipe(job.out) - job.out.on('finish', () => res.unpipe()) - res.on('end', () => resolve()) - }).on('error', (err) => { - outputctl.error(err.message) - reject(err) + // If superceded, exit early without processing results + if (superceded) return + + if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + if (!assembly.results) throw new Error('No results in assembly') + const resultsKeys = Object.keys(assembly.results) + const firstKey = resultsKeys[0] + if (!firstKey) throw new Error('No results in assembly') + const firstResult = assembly.results[firstKey] + if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') + const resulturl = 
firstResult[0].url + + if (job.out != null && resulturl) { + outputctl.debug('DOWNLOADING') + await new Promise((resolve, reject) => { + const get = resulturl.startsWith('https') ? https.get : http.get + get(resulturl, (res) => { + if (res.statusCode !== 200) { + const msg = `Server returned http status ${res.statusCode}` + outputctl.error(msg) + return reject(new Error(msg)) + } + + if (superceded) return resolve() + + if (!job.out) { + return reject(new Error('Job output stream is undefined')) + } + res.pipe(job.out) + job.out.on('finish', () => res.unpipe()) + res.on('end', () => resolve()) + }).on('error', (err) => { + outputctl.error(err.message) + reject(err) + }) }) - }) - } - await completeJob() - })() + } + await completeJob() + })() - jobsPromise.add(jobPromise) + jobsPromise.add(jobPromise) - async function completeJob(): Promise { - activeJobs.delete(job) - const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined - const outPath = job.out?.path - outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) + async function completeJob(): Promise { + activeJobs.delete(job) + const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined + const outPath = job.out?.path + outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 
'null'}`) - if (del && job.in != null && inPath) { - await fsp.unlink(inPath) + if (del && job.in != null && inPath) { + await fsp.unlink(inPath) + } } - } - }) + }) - emitter.on('error', (err: Error) => { - // Abort all in-flight createAssembly calls to ensure clean shutdown - abortController.abort() - activeJobs.clear() - outputctl.error(err) - reject(err) - }) + emitter.on('error', (err: Error) => { + // Abort all in-flight createAssembly calls to ensure clean shutdown + abortController.abort() + activeJobs.clear() + outputctl.error(err) + reject(err) + }) - emitter.on('end', async () => { - const results = await jobsPromise.allSettled() - resolve({ results, hasFailures: jobsPromise.hasFailures }) - }) + emitter.on('end', async () => { + const results = await jobsPromise.allSettled() + resolve({ results, hasFailures: jobsPromise.hasFailures }) + }) + } }) } diff --git a/src/cli/commands/BaseCommand.ts b/src/cli/commands/BaseCommand.ts index 2e7bf009..ee287239 100644 --- a/src/cli/commands/BaseCommand.ts +++ b/src/cli/commands/BaseCommand.ts @@ -14,6 +14,11 @@ export abstract class BaseCommand extends Command { description: 'Output in JSON format', }) + endpoint = Option.String('--endpoint', { + description: + 'API endpoint URL (default: https://api2.transloadit.com, or TRANSLOADIT_ENDPOINT env var)', + }) + protected output!: IOutputCtl protected client!: TransloaditClient @@ -33,9 +38,12 @@ export abstract class BaseCommand extends Command { return false } + const endpoint = this.endpoint || process.env.TRANSLOADIT_ENDPOINT + this.client = new TransloaditClient({ authKey: process.env.TRANSLOADIT_KEY, authSecret: process.env.TRANSLOADIT_SECRET, + ...(endpoint && { endpoint }), }) return true } diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts index 092a324d..e2c13476 100644 --- a/src/cli/commands/assemblies.ts +++ b/src/cli/commands/assemblies.ts @@ -71,6 +71,10 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { 
description: 'Process inputs even if output is newer', }) + singleAssembly = Option.Boolean('--single-assembly', false, { + description: 'Pass all input files to a single assembly instead of one assembly per file', + }) + protected async run(): Promise { if (!this.steps && !this.template) { this.output.error('assemblies create requires exactly one of either --steps or --template') @@ -104,6 +108,11 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { fieldsMap[key] = value } + if (this.singleAssembly && this.watch) { + this.output.error('--single-assembly cannot be used with --watch') + return 1 + } + const { hasFailures } = await assembliesCreate(this.output, this.client, { steps: this.steps, template: this.template, @@ -114,6 +123,7 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { output: this.outputPath ?? null, del: this.deleteAfterProcessing, reprocessStale: this.reprocessStale, + singleAssembly: this.singleAssembly, }) return hasFailures ? 1 : undefined } From 447f73ff8cdc453c59d4fc6137d41258a2acee6b Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 18:24:07 +0100 Subject: [PATCH 31/45] docs: document --endpoint and --single-assembly CLI options MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- CHANGELOG.md | 2 ++ README.md | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7fe50cf7..328b4c45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,8 @@ You may also want to refer to [GitHub releases](https://github.com/transloadit/n - Add `signal` and `onPoll` options to `awaitAssemblyCompletion()` for cancellation and early termination (useful for custom progress reporting or superseding assemblies in watch mode) - Integrate transloadify CLI into the SDK, providing `assemblies`, `templates`, `bills`, and 
`assembly-notifications` commands - Add `--log-level (-l)` CLI option using syslog severity levels (err=3, warn=4, notice=5, info=6, debug=7, trace=8) +- Add `--endpoint` CLI option for custom API endpoint (also reads `TRANSLOADIT_ENDPOINT` env var) +- Add `--single-assembly` flag to `assemblies create` for passing multiple input files to a single assembly - Apply stricter biome lint rules (noExplicitAny, useAwait, noForEach, noNonNullAssertion) ## v4.0.7 diff --git a/README.md b/README.md index 99047e56..7b057241 100644 --- a/README.md +++ b/README.md @@ -76,6 +76,9 @@ npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ -- # Process recursively with file watching npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ --recursive --watch + +# Process multiple files in a single assembly +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input file1.jpg --input file2.jpg --output results/ --single-assembly ``` ### Managing Assemblies @@ -168,8 +171,13 @@ All commands support these common options: - `--json, -j` - Output results as JSON (useful for scripting) - `--log-level, -l` - Set log verbosity level by name or number (default: notice) +- `--endpoint` - Custom API endpoint URL (or set `TRANSLOADIT_ENDPOINT` env var) - `--help, -h` - Show help for a command +The `assemblies create` command additionally supports: + +- `--single-assembly` - Pass all input files to a single assembly instead of one assembly per file + #### Log Levels The CLI uses [syslog severity levels](https://en.wikipedia.org/wiki/Syslog#Severity_level). 
Lower = more severe, higher = more verbose: From 85c06b916d00a2651357f5909d97586f83e34b8c Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 19:26:26 +0100 Subject: [PATCH 32/45] fix: handle templates with no steps in sync download MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When downloading a template that has no steps defined, `result.content.steps` is undefined. JSON.stringify strips undefined values, so after writing and reading the file back, the `steps` property would be missing. Use nullish coalescing to default to empty object: `steps ?? {}` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/cli/templates.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/cli/templates.ts b/src/cli/templates.ts index da9b81bc..6a15db92 100644 --- a/src/cli/templates.ts +++ b/src/cli/templates.ts @@ -317,7 +317,8 @@ export async function sync( const result = await client.getTemplate(templateId) - template.data.steps = result.content.steps + // Use empty object if template has no steps (undefined would be stripped by JSON.stringify) + template.data.steps = result.content.steps ?? {} const file = path.join(path.dirname(template.file), `${result.name}.json`) await fsp.writeFile(template.file, JSON.stringify(template.data)) From c364b3b0e1d4dbf77bd765c171904db6ba186782 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 19:30:47 +0100 Subject: [PATCH 33/45] refactor: use tryCatch utility for cleaner error handling MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace try-catch blocks with tryCatch() in assemblies-create.ts for simple "stat file, use default on error" patterns. This reduces boilerplate and improves readability. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/cli/assemblies-create.ts | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 5da8f171..16c0cf8b 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -8,6 +8,7 @@ import process from 'node:process' import type { Readable, Writable } from 'node:stream' import tty from 'node:tty' import { promisify } from 'node:util' +import { tryCatch } from '../alphalib/tryCatch.ts' import type { StepsInput } from '../alphalib/types/template.ts' import type { CreateAssemblyParams } from '../apiTypes.ts' import type { CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' @@ -130,13 +131,8 @@ function dirProvider(output: string): OutstreamProvider { const outdir = path.dirname(outpath) await ensureDir(outdir) - let mtime: Date - try { - const stats = await fsp.stat(outpath) - mtime = stats.mtime - } catch (_err) { - mtime = new Date(0) - } + const [, stats] = await tryCatch(fsp.stat(outpath)) + const mtime = stats?.mtime ?? new Date(0) const outstream = fs.createWriteStream(outpath) as OutStream // Attach a no-op error handler to prevent unhandled errors if stream is destroyed // before being consumed (e.g., due to output collision detection) @@ -152,13 +148,8 @@ function fileProvider(output: string): OutstreamProvider { await dirExistsP if (output === '-') return process.stdout as OutStream - let mtime: Date - try { - const stats = await fsp.stat(output) - mtime = stats.mtime - } catch (_err) { - mtime = new Date(0) - } + const [, stats] = await tryCatch(fsp.stat(output)) + const mtime = stats?.mtime ?? 
new Date(0) const outstream = fs.createWriteStream(output) as OutStream // Attach a no-op error handler to prevent unhandled errors if stream is destroyed // before being consumed (e.g., due to output collision detection) @@ -651,13 +642,9 @@ export default async function run( // Determine output stat async before entering the Promise constructor let outstat: StatLike | undefined if (resolvedOutput != null) { - try { - outstat = await myStat(process.stdout, resolvedOutput) - } catch (e) { - if (!isErrnoException(e)) throw e - if (e.code !== 'ENOENT') throw e - outstat = { isDirectory: () => false } - } + const [err, stat] = await tryCatch(myStat(process.stdout, resolvedOutput)) + if (err && (!isErrnoException(err) || err.code !== 'ENOENT')) throw err + outstat = stat ?? { isDirectory: () => false } if (!outstat.isDirectory() && inputs.length !== 0) { const firstInput = inputs[0] From 243852df0d1aeb34c75b1318c1bb736c80dc4bc9 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 19:38:01 +0100 Subject: [PATCH 34/45] fix: remove non-functional notifications list, add download abort signal MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove assembly-notifications list command (Transloadit API doesn't have a list notifications endpoint, only replay) - Remove the stub implementation and test - Update README to remove the non-existent list command - Add AbortSignal to https.get downloads so they can be cancelled when errors occur or the process is aborted 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- README.md | 6 ++-- src/cli/assemblies-create.ts | 12 ++++--- src/cli/commands/index.ts | 3 +- src/cli/commands/notifications.ts | 50 ------------------------------ src/cli/notifications.ts | 15 --------- test/e2e/cli/notifications.test.ts | 20 ------------ 6 files changed, 12 insertions(+), 94 deletions(-) delete mode 100644 test/e2e/cli/notifications.test.ts diff 
--git a/README.md b/README.md index 7b057241..27c4c12e 100644 --- a/README.md +++ b/README.md @@ -145,11 +145,11 @@ npx transloadit bills get 2024-01 --json ### Assembly Notifications ```bash -# List notifications for an assembly -npx transloadit assembly-notifications list ASSEMBLY_ID - # Replay a notification npx transloadit assembly-notifications replay ASSEMBLY_ID + +# Replay to a different URL +npx transloadit assembly-notifications replay --notify-url https://example.com/hook ASSEMBLY_ID ``` ### Signature Generation diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 16c0cf8b..e6d5f9db 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -780,7 +780,7 @@ export default async function run( outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) await new Promise((dlResolve, dlReject) => { const get = resultUrl.startsWith('https') ? https.get : http.get - get(resultUrl, (res) => { + const req = get(resultUrl, { signal: abortController.signal }, (res) => { if (res.statusCode !== 200) { const msg = `Server returned http status ${res.statusCode}` outputctl.error(msg) @@ -790,7 +790,9 @@ export default async function run( res.pipe(outStream) outStream.on('finish', () => dlResolve()) outStream.on('error', dlReject) - }).on('error', (err) => { + }) + req.on('error', (err) => { + if (err.name === 'AbortError') return dlResolve() outputctl.error(err.message) dlReject(err) }) @@ -874,7 +876,7 @@ export default async function run( outputctl.debug('DOWNLOADING') await new Promise((resolve, reject) => { const get = resulturl.startsWith('https') ? 
https.get : http.get - get(resulturl, (res) => { + const req = get(resulturl, { signal: abortController.signal }, (res) => { if (res.statusCode !== 200) { const msg = `Server returned http status ${res.statusCode}` outputctl.error(msg) @@ -889,7 +891,9 @@ export default async function run( res.pipe(job.out) job.out.on('finish', () => res.unpipe()) res.on('end', () => resolve()) - }).on('error', (err) => { + }) + req.on('error', (err) => { + if (err.name === 'AbortError') return resolve() outputctl.error(err.message) reject(err) }) diff --git a/src/cli/commands/index.ts b/src/cli/commands/index.ts index 83e5a099..5837d5a9 100644 --- a/src/cli/commands/index.ts +++ b/src/cli/commands/index.ts @@ -14,7 +14,7 @@ import { SignatureCommand, SmartCdnSignatureCommand } from './auth.ts' import { BillsGetCommand } from './bills.ts' -import { NotificationsListCommand, NotificationsReplayCommand } from './notifications.ts' +import { NotificationsReplayCommand } from './notifications.ts' import { TemplatesCreateCommand, @@ -60,7 +60,6 @@ export function createCli(): Cli { // Notifications commands cli.register(NotificationsReplayCommand) - cli.register(NotificationsListCommand) return cli } diff --git a/src/cli/commands/notifications.ts b/src/cli/commands/notifications.ts index 183711db..32647d76 100644 --- a/src/cli/commands/notifications.ts +++ b/src/cli/commands/notifications.ts @@ -37,53 +37,3 @@ export class NotificationsReplayCommand extends AuthenticatedCommand { return undefined } } - -export class NotificationsListCommand extends AuthenticatedCommand { - static override paths = [ - ['assembly-notifications', 'list'], - ['notifications', 'list'], - ['notification', 'list'], - ['n', 'list'], - ['n', 'l'], - ] - - static override usage = Command.Usage({ - category: 'Notifications', - description: 'List notifications matching given criteria', - details: ` - If ASSEMBLY is specified, return only notifications sent for that assembly. 
- `, - examples: [ - ['List all notifications', 'transloadit assembly-notifications list'], - ['List failed notifications', 'transloadit assembly-notifications list --failed'], - ['List for specific assembly', 'transloadit assembly-notifications list ASSEMBLY_ID'], - ], - }) - - failed = Option.Boolean('--failed', false, { - description: 'Return only failed notifications', - }) - - successful = Option.Boolean('--successful', false, { - description: 'Return only successful notifications', - }) - - assemblyId = Option.String({ required: false }) - - protected async run(): Promise { - if (this.failed && this.successful) { - this.output.error('assembly-notifications accepts at most one of --failed and --successful') - return 1 - } - - let type: string | undefined - if (this.failed) type = 'failed' - else if (this.successful) type = 'successful' - - await notifications.list(this.output, this.client, { - type, - assembly_id: this.assemblyId, - }) - return undefined - } -} diff --git a/src/cli/notifications.ts b/src/cli/notifications.ts index a0439730..8a32bac6 100644 --- a/src/cli/notifications.ts +++ b/src/cli/notifications.ts @@ -8,12 +8,6 @@ export interface NotificationsReplayOptions { assemblies: string[] } -export interface NotificationsListOptions { - type?: string - assembly_id?: string - pagesize?: number -} - export async function replay( output: IOutputCtl, client: Transloadit, @@ -25,12 +19,3 @@ export async function replay( output.error(ensureError(err).message) } } - -export function list( - output: IOutputCtl, - _client: Transloadit, - { type: _type, assembly_id: _assembly_id }: NotificationsListOptions, -): Promise { - output.error('List notifications is not supported in this version') - return Promise.resolve() -} diff --git a/test/e2e/cli/notifications.test.ts b/test/e2e/cli/notifications.test.ts deleted file mode 100644 index b7d060b9..00000000 --- a/test/e2e/cli/notifications.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { describe, expect, it } from 
'vitest' -import * as notifications from '../../../src/cli/notifications.ts' -import OutputCtl from './OutputCtl.ts' -import type { OutputEntry } from './test-utils.ts' -import { testCase } from './test-utils.ts' - -describe('assembly-notifications', () => { - describe('list', () => { - // Skipped: notifications.list is not implemented in the SDK - it.skip( - 'should list notifications', - testCase(async (client) => { - const output = new OutputCtl() - await notifications.list(output, client, { pagesize: 1 }) - const logs = output.get() as OutputEntry[] - expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) - }), - ) - }) -}) From 60273d6b090ea03e2321f97f04ec8507f3f77b5b Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 19:42:21 +0100 Subject: [PATCH 35/45] refactor: use got.stream + pipeline for downloads MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace http/https.get with got.stream() and Node's pipeline(). This is cleaner, more consistent with the rest of the SDK (which uses got), and reduces callback nesting. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/cli/assemblies-create.ts | 69 ++++++++++++------------------------ 1 file changed, 23 insertions(+), 46 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index e6d5f9db..f1bd2d1b 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -1,13 +1,13 @@ import EventEmitter from 'node:events' import fs from 'node:fs' import fsp from 'node:fs/promises' -import http from 'node:http' -import https from 'node:https' import path from 'node:path' import process from 'node:process' import type { Readable, Writable } from 'node:stream' +import { pipeline } from 'node:stream/promises' import tty from 'node:tty' import { promisify } from 'node:util' +import got from 'got' import { tryCatch } from '../alphalib/tryCatch.ts' import type { StepsInput } from '../alphalib/types/template.ts' import type { CreateAssemblyParams } from '../apiTypes.ts' @@ -778,25 +778,17 @@ export default async function run( } outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) - await new Promise((dlResolve, dlReject) => { - const get = resultUrl.startsWith('https') ? 
https.get : http.get - const req = get(resultUrl, { signal: abortController.signal }, (res) => { - if (res.statusCode !== 200) { - const msg = `Server returned http status ${res.statusCode}` - outputctl.error(msg) - return dlReject(new Error(msg)) - } - const outStream = fs.createWriteStream(outPath) - res.pipe(outStream) - outStream.on('finish', () => dlResolve()) - outStream.on('error', dlReject) - }) - req.on('error', (err) => { - if (err.name === 'AbortError') return dlResolve() - outputctl.error(err.message) - dlReject(err) - }) - }) + const [dlErr] = await tryCatch( + pipeline( + got.stream(resultUrl, { signal: abortController.signal }), + fs.createWriteStream(outPath), + ), + ) + if (dlErr) { + if (dlErr.name === 'AbortError') continue + outputctl.error(dlErr.message) + throw dlErr + } } } } @@ -872,32 +864,17 @@ export default async function run( if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') const resulturl = firstResult[0].url - if (job.out != null && resulturl) { + if (job.out != null && resulturl && !superceded) { outputctl.debug('DOWNLOADING') - await new Promise((resolve, reject) => { - const get = resulturl.startsWith('https') ? 
https.get : http.get - const req = get(resulturl, { signal: abortController.signal }, (res) => { - if (res.statusCode !== 200) { - const msg = `Server returned http status ${res.statusCode}` - outputctl.error(msg) - return reject(new Error(msg)) - } - - if (superceded) return resolve() - - if (!job.out) { - return reject(new Error('Job output stream is undefined')) - } - res.pipe(job.out) - job.out.on('finish', () => res.unpipe()) - res.on('end', () => resolve()) - }) - req.on('error', (err) => { - if (err.name === 'AbortError') return resolve() - outputctl.error(err.message) - reject(err) - }) - }) + const [dlErr] = await tryCatch( + pipeline(got.stream(resulturl, { signal: abortController.signal }), job.out), + ) + if (dlErr) { + if (dlErr.name !== 'AbortError') { + outputctl.error(dlErr.message) + throw dlErr + } + } } await completeJob() })() From d73f212d9ed9a8d1793bc5ca3961a3e79fac5a2e Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Wed, 3 Dec 2025 19:45:24 +0100 Subject: [PATCH 36/45] test: add e2e test verifying download integrity via md5 hash MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add test that verifies downloaded file md5 matches the md5hash from the assembly result - Return assembly status from job promises so tests can access result metadata like md5hash 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/cli/assemblies-create.ts | 2 ++ test/e2e/cli/assemblies.test.ts | 34 +++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index f1bd2d1b..3d61660c 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -799,6 +799,7 @@ export default async function run( await fsp.unlink(inPath) } } + return assembly })() jobsPromise.add(singleAssemblyPromise) @@ -877,6 +878,7 @@ export default async function run( } } await completeJob() + return assembly 
})() jobsPromise.add(jobPromise) diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index e12d9445..ea53d050 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -1,3 +1,4 @@ +import crypto from 'node:crypto' import fsp from 'node:fs/promises' import process from 'node:process' import { promisify } from 'node:util' @@ -230,6 +231,39 @@ describe('assemblies', () => { }), ) + it( + 'should download file with correct md5 hash', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + const { results } = await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out-md5.jpg', + }) + + // Get the assembly result to find the expected md5hash + // The results array contains assembly statuses + const assemblyResult = results[0] as { + results?: Record> + } + expect(assemblyResult).to.have.property('results') + const resultSteps = Object.values(assemblyResult.results ?? {}) + expect(resultSteps.length).to.be.greaterThan(0) + const firstResult = resultSteps[0]?.[0] + expect(firstResult).to.have.property('md5hash') + const expectedMd5 = firstResult?.md5hash + + // Calculate md5 of downloaded file + const downloadedBuffer = await fsp.readFile('out-md5.jpg') + const actualMd5 = crypto.createHash('md5').update(downloadedBuffer).digest('hex') + + expect(actualMd5).to.equal(expectedMd5) + }), + ) + it( 'should handle multiple inputs', testCase(async (client) => { From 13b3547316a2f8a35ad283e9e5f23b50ce362b75 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 10:04:29 +0100 Subject: [PATCH 37/45] fix: prevent file descriptor exhaustion with concurrency limiting MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add --concurrency option and fix file handle leaks: 1. Default mode: Queue job metadata and process up to N at a time (default: 5). 
Creates fresh streams only when job slots available. 2. Single-assembly mode: Close streams immediately after collecting paths, create fresh streams only when uploading. Both fixes prevent EMFILE errors when processing many files. Tests verify: - "PROCESSING JOB" messages emitted with concurrency limiting - "STREAM CLOSED" messages emitted in single-assembly mode - Max concurrent jobs respects the concurrency limit 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- CHANGELOG.md | 2 + README.md | 3 + src/cli/assemblies-create.ts | 166 ++++++++++++++++++++++---------- src/cli/commands/assemblies.ts | 7 ++ test/e2e/cli/assemblies.test.ts | 94 ++++++++++++++++++ 5 files changed, 221 insertions(+), 51 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 328b4c45..7fa6f343 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,8 @@ You may also want to refer to [GitHub releases](https://github.com/transloadit/n - Add `--log-level (-l)` CLI option using syslog severity levels (err=3, warn=4, notice=5, info=6, debug=7, trace=8) - Add `--endpoint` CLI option for custom API endpoint (also reads `TRANSLOADIT_ENDPOINT` env var) - Add `--single-assembly` flag to `assemblies create` for passing multiple input files to a single assembly +- Add `--concurrency` option to `assemblies create` to limit parallel processing (default: 5) +- Fix file descriptor exhaustion by closing streams immediately and creating fresh ones on demand - Apply stricter biome lint rules (noExplicitAny, useAwait, noForEach, noNonNullAssertion) ## v4.0.7 diff --git a/README.md b/README.md index 27c4c12e..c041b465 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,9 @@ npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ -- # Process multiple files in a single assembly npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input file1.jpg --input file2.jpg --output results/ --single-assembly + +# Limit concurrent processing 
(default: 5) +npx transloadit assemblies create --template YOUR_TEMPLATE_ID --input images/ --output thumbs/ --concurrency 2 ``` ### Managing Assemblies diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index e6d5f9db..ed51a1e0 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -591,8 +591,11 @@ export interface AssembliesCreateOptions { del?: boolean reprocessStale?: boolean singleAssembly?: boolean + concurrency?: number } +const DEFAULT_CONCURRENCY = 5 + export default async function run( outputctl: IOutputCtl, client: Transloadit, @@ -607,6 +610,7 @@ export default async function run( del, reprocessStale, singleAssembly, + concurrency = DEFAULT_CONCURRENCY, }: AssembliesCreateOptions, ): Promise<{ results: unknown[]; hasFailures: boolean }> { // Quick fix for https://github.com/transloadit/transloadify/issues/13 @@ -694,13 +698,19 @@ export default async function run( }) if (singleAssembly) { - // Single-assembly mode: collect all jobs, then create one assembly with all inputs - const collectedJobs: Job[] = [] + // Single-assembly mode: collect file paths, then create one assembly with all inputs + // We close streams immediately to avoid exhausting file descriptors with many files + const collectedPaths: string[] = [] emitter.on('job', (job: Job) => { - const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined - outputctl.debug(`COLLECTING JOB ${inPath ?? 
'null'}`) - collectedJobs.push(job) + if (job.in != null) { + const inPath = (job.in as fs.ReadStream).path as string + outputctl.debug(`COLLECTING JOB ${inPath}`) + collectedPaths.push(inPath) + // Close the stream immediately to avoid file descriptor exhaustion + ;(job.in as fs.ReadStream).destroy() + outputctl.debug(`STREAM CLOSED ${inPath}`) + } }) emitter.on('error', (err: Error) => { @@ -710,28 +720,25 @@ export default async function run( }) emitter.on('end', async () => { - if (collectedJobs.length === 0) { + if (collectedPaths.length === 0) { resolve({ results: [], hasFailures: false }) return } - // Build uploads object with all input files + // Build uploads object, creating fresh streams for each file const uploads: Record = {} const inputPaths: string[] = [] - for (const job of collectedJobs) { - if (job.in != null) { - const inPath = (job.in as fs.ReadStream).path as string - const basename = path.basename(inPath) - // Use a unique key if there are name collisions - let key = basename - let counter = 1 - while (key in uploads) { - key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` - counter++ - } - uploads[key] = job.in - inputPaths.push(inPath) + for (const inPath of collectedPaths) { + const basename = path.basename(inPath) + // Use a unique key if there are name collisions + let key = basename + let counter = 1 + while (key in uploads) { + key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` + counter++ } + uploads[key] = fs.createReadStream(inPath) + inputPaths.push(inPath) } outputctl.debug(`Creating single assembly with ${Object.keys(uploads).length} files`) @@ -814,16 +821,47 @@ export default async function run( resolve({ results, hasFailures: jobsPromise.hasFailures }) }) } else { - // Default mode: one assembly per file - emitter.on('job', (job: Job) => { - activeJobs.add(job) - const inPath = job.in ? 
((job.in as fs.ReadStream).path as string | undefined) : undefined - const outPath = job.out?.path - outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + // Default mode: one assembly per file with concurrency limiting + // Queue jobs and limit how many run in parallel to avoid file descriptor exhaustion + interface QueuedJob { + inPath: string | null + outPath: string | null + outMtime: Date | undefined + } + const jobQueue: QueuedJob[] = [] + let activeCount = 0 + let emitterEnded = false + let resolveWhenDone: (() => void) | null = null + + function tryProcessNext(): void { + while (activeCount < concurrency && jobQueue.length > 0) { + const queuedJob = jobQueue.shift() + if (!queuedJob) break + activeCount++ + processJob(queuedJob) + } + // Check if we're done (emitter ended, queue empty, no active jobs) + if (emitterEnded && jobQueue.length === 0 && activeCount === 0 && resolveWhenDone) { + resolveWhenDone() + } + } + + function processJob(queuedJob: QueuedJob): void { + const { inPath, outPath, outMtime } = queuedJob + outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + // Create fresh streams for this job + const inStream = inPath ? fs.createReadStream(inPath) : null + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + inStream?.on('error', () => {}) + const outStream = outPath ? 
(fs.createWriteStream(outPath) as OutStream) : null + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + outStream?.on('error', () => {}) + if (outStream) outStream.mtime = outMtime let superceded = false - if (job.out != null) - job.out.on('finish', () => { + if (outStream != null) + outStream.on('finish', () => { superceded = true }) @@ -831,8 +869,8 @@ export default async function run( params, signal: abortController.signal, } - if (job.in != null) { - createOptions.uploads = { in: job.in } + if (inStream != null) { + createOptions.uploads = { in: inStream } } const jobPromise = (async () => { @@ -872,59 +910,85 @@ export default async function run( if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') const resulturl = firstResult[0].url - if (job.out != null && resulturl) { + if (outStream != null && resulturl && !superceded) { outputctl.debug('DOWNLOADING') - await new Promise((resolve, reject) => { + await new Promise((dlResolve, dlReject) => { const get = resulturl.startsWith('https') ? 
https.get : http.get const req = get(resulturl, { signal: abortController.signal }, (res) => { if (res.statusCode !== 200) { const msg = `Server returned http status ${res.statusCode}` outputctl.error(msg) - return reject(new Error(msg)) + return dlReject(new Error(msg)) } - if (superceded) return resolve() + if (superceded) return dlResolve() - if (!job.out) { - return reject(new Error('Job output stream is undefined')) - } - res.pipe(job.out) - job.out.on('finish', () => res.unpipe()) - res.on('end', () => resolve()) + res.pipe(outStream) + outStream.on('finish', () => res.unpipe()) + res.on('end', () => dlResolve()) }) req.on('error', (err) => { - if (err.name === 'AbortError') return resolve() + if (err.name === 'AbortError') return dlResolve() outputctl.error(err.message) - reject(err) + dlReject(err) }) }) } - await completeJob() - })() - - jobsPromise.add(jobPromise) - async function completeJob(): Promise { - activeJobs.delete(job) - const inPath = job.in ? ((job.in as fs.ReadStream).path as string | undefined) : undefined - const outPath = job.out?.path outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) - if (del && job.in != null && inPath) { + if (del && inPath) { await fsp.unlink(inPath) } + return assembly + })().finally(() => { + activeCount-- + tryProcessNext() + }) + + jobsPromise.add(jobPromise) + } + + emitter.on('job', (job: Job) => { + activeJobs.add(job) + const inPath = job.in + ? (((job.in as fs.ReadStream).path as string | undefined) ?? null) + : null + const outPath = job.out?.path ?? null + const outMtime = job.out?.mtime + outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 
'null'}`) + + // Close the original streams immediately - we'll create fresh ones when processing + if (job.in != null) { + ;(job.in as fs.ReadStream).destroy() } + if (job.out != null) { + job.out.destroy() + } + activeJobs.delete(job) + + // Queue the job metadata for later processing + jobQueue.push({ inPath, outPath, outMtime }) + tryProcessNext() }) emitter.on('error', (err: Error) => { // Abort all in-flight createAssembly calls to ensure clean shutdown abortController.abort() activeJobs.clear() + jobQueue.length = 0 // Clear the queue outputctl.error(err) reject(err) }) emitter.on('end', async () => { + emitterEnded = true + // If there are still jobs in queue or active, wait for them + if (jobQueue.length > 0 || activeCount > 0) { + await new Promise((r) => { + resolveWhenDone = r + }) + } const results = await jobsPromise.allSettled() resolve({ results, hasFailures: jobsPromise.hasFailures }) }) diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts index e2c13476..af724a4c 100644 --- a/src/cli/commands/assemblies.ts +++ b/src/cli/commands/assemblies.ts @@ -1,5 +1,6 @@ import process from 'node:process' import { Command, Option } from 'clipanion' +import * as t from 'typanion' import * as assemblies from '../assemblies.ts' import assembliesCreate from '../assemblies-create.ts' import { AuthenticatedCommand } from './BaseCommand.ts' @@ -75,6 +76,11 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { description: 'Pass all input files to a single assembly instead of one assembly per file', }) + concurrency = Option.String('--concurrency,-c', { + description: 'Maximum number of concurrent assemblies (default: 5)', + validator: t.isNumber(), + }) + protected async run(): Promise { if (!this.steps && !this.template) { this.output.error('assemblies create requires exactly one of either --steps or --template') @@ -124,6 +130,7 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { del: 
this.deleteAfterProcessing, reprocessStale: this.reprocessStale, singleAssembly: this.singleAssembly, + concurrency: this.concurrency, }) return hasFailures ? 1 : undefined } diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index e12d9445..a6080fb8 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -479,5 +479,99 @@ describe('assemblies', () => { ).to.have.lengthOf(0) }), ) + + it( + 'should process many files with concurrency limiting', + testCase(async (client) => { + // Create 6 input files + const fileCount = 6 + const infiles = await Promise.all( + Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), + ) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + concurrency: 2, // Only process 2 at a time + }) + + // Verify all files were processed successfully + const outs = await fsp.readdir('out') + expect(outs).to.have.lengthOf(fileCount) + + // Analyze debug output to verify concurrency limiting was applied. + // The fixed code emits "PROCESSING JOB" when jobs start (up to concurrency limit). + // The unfixed code has no such limiting - all jobs start at once with "GOT JOB". 
+ const debugOutput = output.get(true) as OutputEntry[] + const messages = debugOutput.map((e) => String(e.msg)) + + // Check that "PROCESSING JOB" messages exist (added by the fix) + const processingMessages = messages.filter((m) => m.startsWith('PROCESSING JOB')) + expect( + processingMessages.length, + 'Expected "PROCESSING JOB" debug messages from concurrency-limited processing', + ).to.be.greaterThan(0) + + // Track max concurrent jobs by counting "PROCESSING JOB" vs "COMPLETED" messages + let activeJobs = 0 + let maxActiveJobs = 0 + for (const msg of messages) { + if (msg.startsWith('PROCESSING JOB')) { + activeJobs++ + maxActiveJobs = Math.max(maxActiveJobs, activeJobs) + } else if (msg.startsWith('COMPLETED')) { + activeJobs-- + } + } + + // With concurrency=2, we should never have more than 2 jobs processing at once + expect( + maxActiveJobs, + 'Max concurrent jobs should not exceed concurrency limit', + ).to.be.at.most(2) + }), + ) + + it( + 'should close streams immediately in single-assembly mode', + testCase(async (client) => { + // Create multiple input files for single-assembly mode + const fileCount = 5 + const infiles = await Promise.all( + Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), + ) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + singleAssembly: true, // All files in one assembly + }) + + // Verify files were processed + const outs = await fsp.readdir('out') + expect(outs.length).to.be.greaterThan(0) + + // Analyze debug output to verify streams were handled properly. + // The fixed code emits "STREAM CLOSED" when closing streams during collection. + // The unfixed code keeps all streams open until upload, risking fd exhaustion. 
+ const debugOutput = output.get(true) as OutputEntry[] + const messages = debugOutput.map((e) => String(e.msg)) + + // Check that streams were closed during collection (added by the fix) + const streamClosedMessages = messages.filter((m) => m.startsWith('STREAM CLOSED')) + expect( + streamClosedMessages.length, + 'Expected "STREAM CLOSED" messages indicating proper fd management', + ).to.be.greaterThan(0) + }), + ) }) }) From 400719f43ce3184ad56c339722d731243753c457 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 15:48:02 +0100 Subject: [PATCH 38/45] perf: split assemblies e2e tests for parallel execution MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Split assemblies.test.ts into three files to leverage vitest's file-level parallelism: - assemblies.test.ts: get, delete, replay tests - assemblies-create.test.ts: create tests - assemblies-list.test.ts: list test (slowest at ~30s) This reduces e2e test wall time from ~73s to ~40s (~45% faster). 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- test/e2e/cli/assemblies-create.test.ts | 451 ++++++++++++++++++++++++ test/e2e/cli/assemblies-list.test.ts | 19 ++ test/e2e/cli/assemblies.test.ts | 456 ------------------------- 3 files changed, 470 insertions(+), 456 deletions(-) create mode 100644 test/e2e/cli/assemblies-create.test.ts create mode 100644 test/e2e/cli/assemblies-list.test.ts diff --git a/test/e2e/cli/assemblies-create.test.ts b/test/e2e/cli/assemblies-create.test.ts new file mode 100644 index 00000000..a8681bab --- /dev/null +++ b/test/e2e/cli/assemblies-create.test.ts @@ -0,0 +1,451 @@ +import crypto from 'node:crypto' +import fsp from 'node:fs/promises' +import process from 'node:process' +import { promisify } from 'node:util' +import { imageSize } from 'image-size' +import rreaddir from 'recursive-readdir' +import { describe, expect, it } from 'vitest' +import assembliesCreate from '../../../src/cli/assemblies-create.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +const rreaddirAsync = promisify(rreaddir) + +describe('assemblies', () => { + describe('create', () => { + const genericImg = 'https://placehold.co/100.jpg' + + async function imgPromise(fname = 'in.jpg'): Promise { + const response = await fetch(genericImg) + if (!response.ok) { + throw new Error(`Failed to fetch image: ${response.status}`) + } + const buffer = Buffer.from(await response.arrayBuffer()) + await fsp.writeFile(fname, buffer) + return fname + } + + const genericSteps = { + resize: { + robot: '/image/resize', + use: ':original', + result: true, + width: 130, + height: 130, + }, + } + + async function stepsPromise( + _fname = 'steps.json', + steps: Record = genericSteps, + ): Promise { + await fsp.writeFile('steps.json', JSON.stringify(steps)) + return 'steps.json' + } + + it( + 'should transcode a file', + testCase(async (client) => { + 
const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out.jpg', + }) + const result = output.get(true) as OutputEntry[] + + expect(result.length).to.be.at.least(3) + const msgs = result.map((r) => r.msg) + expect(msgs).to.include('GOT JOB in.jpg out.jpg') + expect(msgs).to.include('DOWNLOADING') + expect(msgs).to.include('COMPLETED in.jpg out.jpg') + + const imgBuffer = await fsp.readFile('out.jpg') + const dim = imageSize(new Uint8Array(imgBuffer)) + expect(dim).to.have.property('width').that.equals(130) + expect(dim).to.have.property('height').that.equals(130) + }), + ) + + it( + 'should download file with correct md5 hash', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + const { results } = await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out-md5.jpg', + }) + + // Get the assembly result to find the expected md5hash + // The results array contains assembly statuses + const assemblyResult = results[0] as { + results?: Record> + } + expect(assemblyResult).to.have.property('results') + const resultSteps = Object.values(assemblyResult.results ?? 
{}) + expect(resultSteps.length).to.be.greaterThan(0) + const firstResult = resultSteps[0]?.[0] + expect(firstResult).to.have.property('md5hash') + const expectedMd5 = firstResult?.md5hash + + // Calculate md5 of downloaded file + const downloadedBuffer = await fsp.readFile('out-md5.jpg') + const actualMd5 = crypto.createHash('md5').update(downloadedBuffer).digest('hex') + + expect(actualMd5).to.equal(expectedMd5) + }), + ) + + it( + 'should handle multiple inputs', + testCase(async (client) => { + const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + }) + + const outs = await fsp.readdir('out') + expect(outs[0]).to.equal('in1.jpg') + expect(outs[1]).to.equal('in2.jpg') + expect(outs[2]).to.equal('in3.jpg') + expect(outs).to.have.lengthOf(3) + }), + ) + + it( + 'should not output outside outdir', + testCase(async (client) => { + await fsp.mkdir('sub') + process.chdir('sub') + + const infile = await imgPromise('../in.jpg') + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: 'out', + }) + + const outs = await fsp.readdir('out') + expect(outs[0]).to.equal('in.jpg') + expect(outs).to.have.lengthOf(1) + + const ls = await fsp.readdir('.') + expect(ls).to.not.contain('in.jpg') + }), + ) + + it( + 'should structure output directory correctly', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['1.jpg', 'in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + recursive: true, + steps, + inputs: ['1.jpg', 'in'], + output: 'out', + }) + + const outs = await 
rreaddirAsync('out') + expect(outs).to.include('out/1.jpg') + expect(outs).to.include('out/2.jpg') + expect(outs).to.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(3) + }), + ) + + it( + 'should not be recursive by default', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: ['in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/2.jpg') + expect(outs).to.not.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(1) + }), + ) + + it( + 'should be able to handle directories recursively', + testCase(async (client) => { + await fsp.mkdir('in') + await fsp.mkdir('in/sub') + await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + recursive: true, + steps, + inputs: ['in'], + output: 'out', + }) + + const outs = await rreaddirAsync('out') + expect(outs).to.include('out/2.jpg') + expect(outs).to.include('out/sub/3.jpg') + expect(outs).to.have.lengthOf(2) + }), + ) + + it( + 'should detect outdir conflicts', + testCase(async (client) => { + await fsp.mkdir('in') + await Promise.all(['1.jpg', 'in/1.jpg'].map(imgPromise)) + await fsp.mkdir('out') + const steps = await stepsPromise() + + const output = new OutputCtl() + try { + await assembliesCreate(output, client, { + steps, + inputs: ['1.jpg', 'in'], + output: 'out', + }) + throw new Error('assembliesCreate didnt err; should have') + } catch (_err) { + const result = output.get() as OutputEntry[] + expect(result[result.length - 1]) + .to.have.property('type') + .that.equals('error') + expect(result[result.length - 1]) + .to.have.nested.property('msg.message') + 
.that.equals("Output collision between 'in/1.jpg' and '1.jpg'") + } + // Small delay to allow abort signals to propagate and streams to close + await new Promise((resolve) => setTimeout(resolve, 50)) + }), + ) + + it( + 'should not download the result if no output is specified', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: null, + }) + const result = output.get(true) as OutputEntry[] + + // When no output is specified, we might still get debug messages but no actual downloads + const downloadingMsgs = result.filter((line) => String(line.msg) === 'DOWNLOADING') + expect(downloadingMsgs.length).to.be.lessThanOrEqual(1) + }), + ) + + it( + 'should accept invocations with no inputs', + testCase(async (client) => { + await imgPromise() + const steps = await stepsPromise('steps.json', { + import: { + robot: '/http/import', + url: genericImg, + }, + resize: { + robot: '/image/resize', + use: 'import', + result: true, + width: 130, + height: 130, + }, + }) + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [], + output: 'out.jpg', + }) + + await fsp.access('out.jpg') + }), + ) + + it( + 'should allow deleting inputs after processing', + testCase(async (client) => { + const infile = await imgPromise() + const steps = await stepsPromise() + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: [infile], + output: null, + del: true, + }) + + try { + await fsp.access(infile) + throw new Error('File should have been deleted') + } catch (err) { + expect((err as NodeJS.ErrnoException).code).to.equal('ENOENT') + } + }), + ) + + it( + 'should not reprocess inputs that are older than their output', + testCase(async (client) => { + const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) + const 
steps = await stepsPromise() + await fsp.mkdir('out') + + const output1 = new OutputCtl() + await assembliesCreate(output1, client, { + steps, + inputs: [infiles[0] as string], + output: 'out', + }) + + const output2 = new OutputCtl() + await assembliesCreate(output2, client, { + steps, + inputs: infiles, + output: 'out', + }) + const result = output2.get(true) as OutputEntry[] + + expect( + result.map((line) => line.msg).filter((msg) => String(msg).includes('in1.jpg')), + ).to.have.lengthOf(0) + }), + ) + + it( + 'should process many files with concurrency limiting', + testCase(async (client) => { + // Create 6 input files + const fileCount = 6 + const infiles = await Promise.all( + Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), + ) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + concurrency: 2, // Only process 2 at a time + }) + + // Verify all files were processed successfully + const outs = await fsp.readdir('out') + expect(outs).to.have.lengthOf(fileCount) + + // Analyze debug output to verify concurrency limiting was applied. + // The fixed code emits "PROCESSING JOB" when jobs start (up to concurrency limit). + // The unfixed code has no such limiting - all jobs start at once with "GOT JOB". 
+ const debugOutput = output.get(true) as OutputEntry[] + const messages = debugOutput.map((e) => String(e.msg)) + + // Check that "PROCESSING JOB" messages exist (added by the fix) + const processingMessages = messages.filter((m) => m.startsWith('PROCESSING JOB')) + expect( + processingMessages.length, + 'Expected "PROCESSING JOB" debug messages from concurrency-limited processing', + ).to.be.greaterThan(0) + + // Track max concurrent jobs by counting "PROCESSING JOB" vs "COMPLETED" messages + let activeJobs = 0 + let maxActiveJobs = 0 + for (const msg of messages) { + if (msg.startsWith('PROCESSING JOB')) { + activeJobs++ + maxActiveJobs = Math.max(maxActiveJobs, activeJobs) + } else if (msg.startsWith('COMPLETED')) { + activeJobs-- + } + } + + // With concurrency=2, we should never have more than 2 jobs processing at once + expect( + maxActiveJobs, + 'Max concurrent jobs should not exceed concurrency limit', + ).to.be.at.most(2) + }), + ) + + it( + 'should close streams immediately in single-assembly mode', + testCase(async (client) => { + // Create multiple input files for single-assembly mode + const fileCount = 5 + const infiles = await Promise.all( + Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), + ) + const steps = await stepsPromise() + await fsp.mkdir('out') + + const output = new OutputCtl() + await assembliesCreate(output, client, { + steps, + inputs: infiles, + output: 'out', + singleAssembly: true, // All files in one assembly + }) + + // Verify files were processed + const outs = await fsp.readdir('out') + expect(outs.length).to.be.greaterThan(0) + + // Analyze debug output to verify streams were handled properly. + // The fixed code emits "STREAM CLOSED" when closing streams during collection. + // The unfixed code keeps all streams open until upload, risking fd exhaustion. 
+ const debugOutput = output.get(true) as OutputEntry[] + const messages = debugOutput.map((e) => String(e.msg)) + + // Check that streams were closed during collection (added by the fix) + const streamClosedMessages = messages.filter((m) => m.startsWith('STREAM CLOSED')) + expect( + streamClosedMessages.length, + 'Expected "STREAM CLOSED" messages indicating proper fd management', + ).to.be.greaterThan(0) + }), + ) + }) +}) diff --git a/test/e2e/cli/assemblies-list.test.ts b/test/e2e/cli/assemblies-list.test.ts new file mode 100644 index 00000000..1d58140b --- /dev/null +++ b/test/e2e/cli/assemblies-list.test.ts @@ -0,0 +1,19 @@ +import { describe, expect, it } from 'vitest' +import * as assemblies from '../../../src/cli/assemblies.ts' +import OutputCtl from './OutputCtl.ts' +import type { OutputEntry } from './test-utils.ts' +import { testCase } from './test-utils.ts' + +describe('assemblies', () => { + describe('list', () => { + it( + 'should list assemblies', + testCase(async (client) => { + const output = new OutputCtl() + await assemblies.list(output, client, { pagesize: 1 }) + const logs = output.get() as OutputEntry[] + expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) + }), + ) + }) +}) diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index 35eca9a7..b769f63b 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -1,19 +1,11 @@ -import crypto from 'node:crypto' import fsp from 'node:fs/promises' -import process from 'node:process' -import { promisify } from 'node:util' -import { imageSize } from 'image-size' -import rreaddir from 'recursive-readdir' import { describe, expect, it } from 'vitest' import * as assemblies from '../../../src/cli/assemblies.ts' -import assembliesCreate from '../../../src/cli/assemblies-create.ts' import { zip } from '../../../src/cli/helpers.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } 
from './test-utils.ts' -const rreaddirAsync = promisify(rreaddir) - describe('assemblies', () => { describe('get', () => { it( @@ -74,18 +66,6 @@ describe('assemblies', () => { ) }) - describe('list', () => { - it( - 'should list assemblies', - testCase(async (client) => { - const output = new OutputCtl() - await assemblies.list(output, client, { pagesize: 1 }) - const logs = output.get() as OutputEntry[] - expect(logs.filter((l) => l.type === 'error')).to.have.lengthOf(0) - }), - ) - }) - describe('delete', () => { it( 'should delete assemblies', @@ -172,440 +152,4 @@ describe('assemblies', () => { }), ) }) - - describe('create', () => { - const genericImg = 'https://placehold.co/100.jpg' - - async function imgPromise(fname = 'in.jpg'): Promise { - const response = await fetch(genericImg) - if (!response.ok) { - throw new Error(`Failed to fetch image: ${response.status}`) - } - const buffer = Buffer.from(await response.arrayBuffer()) - await fsp.writeFile(fname, buffer) - return fname - } - - const genericSteps = { - resize: { - robot: '/image/resize', - use: ':original', - result: true, - width: 130, - height: 130, - }, - } - - async function stepsPromise( - _fname = 'steps.json', - steps: Record = genericSteps, - ): Promise { - await fsp.writeFile('steps.json', JSON.stringify(steps)) - return 'steps.json' - } - - it( - 'should transcode a file', - testCase(async (client) => { - const infile = await imgPromise() - const steps = await stepsPromise() - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: [infile], - output: 'out.jpg', - }) - const result = output.get(true) as OutputEntry[] - - expect(result.length).to.be.at.least(3) - const msgs = result.map((r) => r.msg) - expect(msgs).to.include('GOT JOB in.jpg out.jpg') - expect(msgs).to.include('DOWNLOADING') - expect(msgs).to.include('COMPLETED in.jpg out.jpg') - - const imgBuffer = await fsp.readFile('out.jpg') - const dim = imageSize(new Uint8Array(imgBuffer)) - 
expect(dim).to.have.property('width').that.equals(130) - expect(dim).to.have.property('height').that.equals(130) - }), - ) - - it( - 'should download file with correct md5 hash', - testCase(async (client) => { - const infile = await imgPromise() - const steps = await stepsPromise() - - const output = new OutputCtl() - const { results } = await assembliesCreate(output, client, { - steps, - inputs: [infile], - output: 'out-md5.jpg', - }) - - // Get the assembly result to find the expected md5hash - // The results array contains assembly statuses - const assemblyResult = results[0] as { - results?: Record> - } - expect(assemblyResult).to.have.property('results') - const resultSteps = Object.values(assemblyResult.results ?? {}) - expect(resultSteps.length).to.be.greaterThan(0) - const firstResult = resultSteps[0]?.[0] - expect(firstResult).to.have.property('md5hash') - const expectedMd5 = firstResult?.md5hash - - // Calculate md5 of downloaded file - const downloadedBuffer = await fsp.readFile('out-md5.jpg') - const actualMd5 = crypto.createHash('md5').update(downloadedBuffer).digest('hex') - - expect(actualMd5).to.equal(expectedMd5) - }), - ) - - it( - 'should handle multiple inputs', - testCase(async (client) => { - const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) - const steps = await stepsPromise() - await fsp.mkdir('out') - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: infiles, - output: 'out', - }) - - const outs = await fsp.readdir('out') - expect(outs[0]).to.equal('in1.jpg') - expect(outs[1]).to.equal('in2.jpg') - expect(outs[2]).to.equal('in3.jpg') - expect(outs).to.have.lengthOf(3) - }), - ) - - it( - 'should not output outside outdir', - testCase(async (client) => { - await fsp.mkdir('sub') - process.chdir('sub') - - const infile = await imgPromise('../in.jpg') - await fsp.mkdir('out') - const steps = await stepsPromise() - - const output = new OutputCtl() - await 
assembliesCreate(output, client, { - steps, - inputs: [infile], - output: 'out', - }) - - const outs = await fsp.readdir('out') - expect(outs[0]).to.equal('in.jpg') - expect(outs).to.have.lengthOf(1) - - const ls = await fsp.readdir('.') - expect(ls).to.not.contain('in.jpg') - }), - ) - - it( - 'should structure output directory correctly', - testCase(async (client) => { - await fsp.mkdir('in') - await fsp.mkdir('in/sub') - await Promise.all(['1.jpg', 'in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) - await fsp.mkdir('out') - const steps = await stepsPromise() - - const output = new OutputCtl() - await assembliesCreate(output, client, { - recursive: true, - steps, - inputs: ['1.jpg', 'in'], - output: 'out', - }) - - const outs = await rreaddirAsync('out') - expect(outs).to.include('out/1.jpg') - expect(outs).to.include('out/2.jpg') - expect(outs).to.include('out/sub/3.jpg') - expect(outs).to.have.lengthOf(3) - }), - ) - - it( - 'should not be recursive by default', - testCase(async (client) => { - await fsp.mkdir('in') - await fsp.mkdir('in/sub') - await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) - await fsp.mkdir('out') - const steps = await stepsPromise() - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: ['in'], - output: 'out', - }) - - const outs = await rreaddirAsync('out') - expect(outs).to.include('out/2.jpg') - expect(outs).to.not.include('out/sub/3.jpg') - expect(outs).to.have.lengthOf(1) - }), - ) - - it( - 'should be able to handle directories recursively', - testCase(async (client) => { - await fsp.mkdir('in') - await fsp.mkdir('in/sub') - await Promise.all(['in/2.jpg', 'in/sub/3.jpg'].map(imgPromise)) - await fsp.mkdir('out') - const steps = await stepsPromise() - - const output = new OutputCtl() - await assembliesCreate(output, client, { - recursive: true, - steps, - inputs: ['in'], - output: 'out', - }) - - const outs = await rreaddirAsync('out') - expect(outs).to.include('out/2.jpg') - 
expect(outs).to.include('out/sub/3.jpg') - expect(outs).to.have.lengthOf(2) - }), - ) - - it( - 'should detect outdir conflicts', - testCase(async (client) => { - await fsp.mkdir('in') - await Promise.all(['1.jpg', 'in/1.jpg'].map(imgPromise)) - await fsp.mkdir('out') - const steps = await stepsPromise() - - const output = new OutputCtl() - try { - await assembliesCreate(output, client, { - steps, - inputs: ['1.jpg', 'in'], - output: 'out', - }) - throw new Error('assembliesCreate didnt err; should have') - } catch (_err) { - const result = output.get() as OutputEntry[] - expect(result[result.length - 1]) - .to.have.property('type') - .that.equals('error') - expect(result[result.length - 1]) - .to.have.nested.property('msg.message') - .that.equals("Output collision between 'in/1.jpg' and '1.jpg'") - } - // Small delay to allow abort signals to propagate and streams to close - await new Promise((resolve) => setTimeout(resolve, 50)) - }), - ) - - it( - 'should not download the result if no output is specified', - testCase(async (client) => { - const infile = await imgPromise() - const steps = await stepsPromise() - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: [infile], - output: null, - }) - const result = output.get(true) as OutputEntry[] - - // When no output is specified, we might still get debug messages but no actual downloads - const downloadingMsgs = result.filter((line) => String(line.msg) === 'DOWNLOADING') - expect(downloadingMsgs.length).to.be.lessThanOrEqual(1) - }), - ) - - it( - 'should accept invocations with no inputs', - testCase(async (client) => { - await imgPromise() - const steps = await stepsPromise('steps.json', { - import: { - robot: '/http/import', - url: genericImg, - }, - resize: { - robot: '/image/resize', - use: 'import', - result: true, - width: 130, - height: 130, - }, - }) - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: [], - output: 
'out.jpg', - }) - - await fsp.access('out.jpg') - }), - ) - - it( - 'should allow deleting inputs after processing', - testCase(async (client) => { - const infile = await imgPromise() - const steps = await stepsPromise() - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: [infile], - output: null, - del: true, - }) - - try { - await fsp.access(infile) - throw new Error('File should have been deleted') - } catch (err) { - expect((err as NodeJS.ErrnoException).code).to.equal('ENOENT') - } - }), - ) - - it( - 'should not reprocess inputs that are older than their output', - testCase(async (client) => { - const infiles = await Promise.all(['in1.jpg', 'in2.jpg', 'in3.jpg'].map(imgPromise)) - const steps = await stepsPromise() - await fsp.mkdir('out') - - const output1 = new OutputCtl() - await assembliesCreate(output1, client, { - steps, - inputs: [infiles[0] as string], - output: 'out', - }) - - const output2 = new OutputCtl() - await assembliesCreate(output2, client, { - steps, - inputs: infiles, - output: 'out', - }) - const result = output2.get(true) as OutputEntry[] - - expect( - result.map((line) => line.msg).filter((msg) => String(msg).includes('in1.jpg')), - ).to.have.lengthOf(0) - }), - ) - - it( - 'should process many files with concurrency limiting', - testCase(async (client) => { - // Create 6 input files - const fileCount = 6 - const infiles = await Promise.all( - Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), - ) - const steps = await stepsPromise() - await fsp.mkdir('out') - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: infiles, - output: 'out', - concurrency: 2, // Only process 2 at a time - }) - - // Verify all files were processed successfully - const outs = await fsp.readdir('out') - expect(outs).to.have.lengthOf(fileCount) - - // Analyze debug output to verify concurrency limiting was applied. 
- // The fixed code emits "PROCESSING JOB" when jobs start (up to concurrency limit). - // The unfixed code has no such limiting - all jobs start at once with "GOT JOB". - const debugOutput = output.get(true) as OutputEntry[] - const messages = debugOutput.map((e) => String(e.msg)) - - // Check that "PROCESSING JOB" messages exist (added by the fix) - const processingMessages = messages.filter((m) => m.startsWith('PROCESSING JOB')) - expect( - processingMessages.length, - 'Expected "PROCESSING JOB" debug messages from concurrency-limited processing', - ).to.be.greaterThan(0) - - // Track max concurrent jobs by counting "PROCESSING JOB" vs "COMPLETED" messages - let activeJobs = 0 - let maxActiveJobs = 0 - for (const msg of messages) { - if (msg.startsWith('PROCESSING JOB')) { - activeJobs++ - maxActiveJobs = Math.max(maxActiveJobs, activeJobs) - } else if (msg.startsWith('COMPLETED')) { - activeJobs-- - } - } - - // With concurrency=2, we should never have more than 2 jobs processing at once - expect( - maxActiveJobs, - 'Max concurrent jobs should not exceed concurrency limit', - ).to.be.at.most(2) - }), - ) - - it( - 'should close streams immediately in single-assembly mode', - testCase(async (client) => { - // Create multiple input files for single-assembly mode - const fileCount = 5 - const infiles = await Promise.all( - Array.from({ length: fileCount }, (_, i) => imgPromise(`in${i}.jpg`)), - ) - const steps = await stepsPromise() - await fsp.mkdir('out') - - const output = new OutputCtl() - await assembliesCreate(output, client, { - steps, - inputs: infiles, - output: 'out', - singleAssembly: true, // All files in one assembly - }) - - // Verify files were processed - const outs = await fsp.readdir('out') - expect(outs.length).to.be.greaterThan(0) - - // Analyze debug output to verify streams were handled properly. - // The fixed code emits "STREAM CLOSED" when closing streams during collection. 
- // The unfixed code keeps all streams open until upload, risking fd exhaustion. - const debugOutput = output.get(true) as OutputEntry[] - const messages = debugOutput.map((e) => String(e.msg)) - - // Check that streams were closed during collection (added by the fix) - const streamClosedMessages = messages.filter((m) => m.startsWith('STREAM CLOSED')) - expect( - streamClosedMessages.length, - 'Expected "STREAM CLOSED" messages indicating proper fd management', - ).to.be.greaterThan(0) - }), - ) - }) }) From 7d50065c21b43bf9663616d6ba99fcb34862e2ad Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 16:01:21 +0100 Subject: [PATCH 39/45] upgrade yarn, add p-queue --- package.json | 5 +++-- yarn.lock | 25 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index f3a5075a..c802f806 100644 --- a/package.json +++ b/package.json @@ -22,17 +22,18 @@ "@transloadit/sev-logger": "^0.0.15", "clipanion": "^4.0.0-rc.4", "debug": "^4.4.3", + "dotenv": "^17.2.3", "form-data": "^4.0.4", "got": "14.4.9", "into-stream": "^9.0.0", "is-stream": "^4.0.1", "node-watch": "^0.7.4", "p-map": "^7.0.3", + "p-queue": "^9.0.1", "recursive-readdir": "^2.2.3", "tus-js-client": "^4.3.1", "type-fest": "^4.41.0", - "zod": "3.25.76", - "dotenv": "^17.2.3" + "zod": "3.25.76" }, "devDependencies": { "@biomejs/biome": "^2.2.4", diff --git a/yarn.lock b/yarn.lock index f9363fa6..608edba8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2967,6 +2967,13 @@ __metadata: languageName: node linkType: hard +"eventemitter3@npm:^5.0.1": + version: 5.0.1 + resolution: "eventemitter3@npm:5.0.1" + checksum: 10c0/4ba5c00c506e6c786b4d6262cfbce90ddc14c10d4667e5c83ae993c9de88aa856033994dd2b35b83e8dc1170e224e66a319fa80adc4c32adcd2379bbc75da814 + languageName: node + linkType: hard + "execa@npm:9.6.0": version: 9.6.0 resolution: "execa@npm:9.6.0" @@ -4377,6 +4384,16 @@ __metadata: languageName: node linkType: hard +"p-queue@npm:^9.0.1": + 
version: 9.0.1 + resolution: "p-queue@npm:9.0.1" + dependencies: + eventemitter3: "npm:^5.0.1" + p-timeout: "npm:^7.0.0" + checksum: 10c0/912bd0c09ec910e6851973757afd23b80e12210948b4e235ebaa752e32e782d7664438c948cd343e2620cf26340665ccdaa2715e4e3c52cf02fe11c5152c225f + languageName: node + linkType: hard + "p-retry@npm:^7.0.0": version: 7.0.0 resolution: "p-retry@npm:7.0.0" @@ -4386,6 +4403,13 @@ __metadata: languageName: node linkType: hard +"p-timeout@npm:^7.0.0": + version: 7.0.1 + resolution: "p-timeout@npm:7.0.1" + checksum: 10c0/87d96529d1096d506607218dba6f9ec077c6dbedd0c2e2788c748e33bcd05faae8a81009fd9d22ec0b3c95fc83f4717306baba223f6e464737d8b99294c3e863 + languageName: node + linkType: hard + "package-json-from-dist@npm:^1.0.0, package-json-from-dist@npm:^1.0.1": version: 1.0.1 resolution: "package-json-from-dist@npm:1.0.1" @@ -5369,6 +5393,7 @@ __metadata: node-watch: "npm:^0.7.4" npm-run-all: "npm:^4.1.5" p-map: "npm:^7.0.3" + p-queue: "npm:^9.0.1" p-retry: "npm:^7.0.0" recursive-readdir: "npm:^2.2.3" rimraf: "npm:^6.1.2" From 523e28907016e476fc64e5f94288f61620902864 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 16:01:57 +0100 Subject: [PATCH 40/45] upgrade yarn --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index c802f806..4079271c 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "mp3" ], "author": "Tim Koschuetzki ", - "packageManager": "yarn@4.10.3", + "packageManager": "yarn@4.12.0", "engines": { "node": ">= 20" }, From 9b3fe42c81f78d5280eb1b9138668faf1cc948c2 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 16:04:06 +0100 Subject: [PATCH 41/45] use p-queue instead of JobPromise --- src/cli/JobsPromise.ts | 61 ------ src/cli/assemblies-create.ts | 404 ++++++++++++++++------------------- 2 files changed, 184 insertions(+), 281 deletions(-) delete mode 100644 src/cli/JobsPromise.ts diff --git a/src/cli/JobsPromise.ts 
b/src/cli/JobsPromise.ts deleted file mode 100644 index 9ecafca4..00000000 --- a/src/cli/JobsPromise.ts +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Tracks a collection of promises and emits errors as they occur. - * Used to run multiple async operations in parallel while: - * 1. Reporting errors as they happen (via onError callback) - * 2. Waiting for all operations to complete at the end - * 3. Tracking whether any failures occurred - */ -export default class JobsPromise { - private promises: Set> = new Set() - private onError: ((err: unknown) => void) | null = null - private _hasFailures = false - - /** - * Set the error handler for individual promise rejections. - * Errors are reported immediately when promises reject. - */ - setErrorHandler(handler: (err: unknown) => void): void { - this.onError = handler - } - - /** - * Add a promise to track. If the promise rejects, - * the error handler will be called. - * @throws Error if error handler has not been set via setErrorHandler() - */ - add(promise: Promise): void { - if (this.onError === null) { - throw new Error('JobsPromise: error handler must be set before adding promises') - } - this.promises.add(promise) - const errorHandler = this.onError - promise - .catch((err: unknown) => { - this._hasFailures = true - errorHandler(err) - }) - .finally(() => { - this.promises.delete(promise) - }) - } - - /** - * Returns true if any tracked promise has rejected. - */ - get hasFailures(): boolean { - return this._hasFailures - } - - /** - * Wait for all tracked promises to settle. - * Returns array of fulfilled values (rejects are filtered out - * since errors were already handled via the error handler). 
- */ - async allSettled(): Promise { - const promises = [...this.promises] - const results = await Promise.allSettled(promises) - return results - .filter((r): r is PromiseFulfilledResult => r.status === 'fulfilled') - .map((r) => r.value) - } -} diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index c8a29b2c..565f3902 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -8,11 +8,11 @@ import { pipeline } from 'node:stream/promises' import tty from 'node:tty' import { promisify } from 'node:util' import got from 'got' +import PQueue from 'p-queue' import { tryCatch } from '../alphalib/tryCatch.ts' import type { StepsInput } from '../alphalib/types/template.ts' import type { CreateAssemblyParams } from '../apiTypes.ts' import type { CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' -import JobsPromise from './JobsPromise.ts' import type { IOutputCtl } from './OutputCtl.ts' import { isErrnoException } from './types.ts' @@ -473,12 +473,9 @@ function detectConflicts(jobEmitter: EventEmitter): MyEventEmitter { function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { const emitter = new MyEventEmitter() + const pendingChecks: Promise[] = [] - const jobsPromise = new JobsPromise() - // Errors are already caught in the promises passed to add(), so use a no-op handler - jobsPromise.setErrorHandler(() => {}) - - jobEmitter.on('end', () => jobsPromise.allSettled().then(() => emitter.emit('end'))) + jobEmitter.on('end', () => Promise.all(pendingChecks).then(() => emitter.emit('end'))) jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) jobEmitter.on('job', (job: Job) => { if (job.in == null || job.out == null) { @@ -487,19 +484,18 @@ function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { } const inPath = (job.in as fs.ReadStream).path as string - jobsPromise.add( - fsp - .stat(inPath) - .then((stats) => { - const inM = stats.mtime - const outM = job.out?.mtime ?? 
new Date(0) - - if (outM <= inM) emitter.emit('job', job) - }) - .catch(() => { - emitter.emit('job', job) - }), - ) + const checkPromise = fsp + .stat(inPath) + .then((stats) => { + const inM = stats.mtime + const outM = job.out?.mtime ?? new Date(0) + + if (outM <= inM) emitter.emit('job', job) + }) + .catch(() => { + emitter.emit('job', job) + }) + pendingChecks.push(checkPromise) }) return emitter @@ -687,15 +683,96 @@ export default async function run( reprocessStale, }) - const jobsPromise = new JobsPromise() - const activeJobs: Set = new Set() + // Use p-queue for concurrency management + const queue = new PQueue({ concurrency }) + const results: unknown[] = [] + let hasFailures = false // AbortController to cancel all in-flight createAssembly calls when an error occurs const abortController = new AbortController() - // Set error handler before subscribing to events that might call add() - jobsPromise.setErrorHandler((err: unknown) => { - outputctl.error(err as Error) - }) + // Helper to process a single assembly job + async function processAssemblyJob( + inPath: string | null, + outPath: string | null, + outMtime: Date | undefined, + ): Promise { + outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + // Create fresh streams for this job + const inStream = inPath ? fs.createReadStream(inPath) : null + inStream?.on('error', () => {}) + const outStream = outPath ? 
(fs.createWriteStream(outPath) as OutStream) : null + outStream?.on('error', () => {}) + if (outStream) outStream.mtime = outMtime + + let superceded = false + if (outStream != null) { + outStream.on('finish', () => { + superceded = true + }) + } + + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (inStream != null) { + createOptions.uploads = { in: inStream } + } + + const result = await client.createAssembly(createOptions) + if (superceded) return undefined + + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const assembly = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onPoll: () => { + if (superceded) return false + return true + }, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) + + if (superceded) return undefined + + if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + if (!assembly.results) throw new Error('No results in assembly') + const resultsKeys = Object.keys(assembly.results) + const firstKey = resultsKeys[0] + if (!firstKey) throw new Error('No results in assembly') + const firstResult = assembly.results[firstKey] + if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') + const resulturl = firstResult[0].url + + if (outStream != null && resulturl && !superceded) { + outputctl.debug('DOWNLOADING') + const [dlErr] = await tryCatch( + pipeline(got.stream(resulturl, { signal: abortController.signal }), outStream), + ) + if (dlErr) { + if (dlErr.name !== 'AbortError') { + outputctl.error(dlErr.message) + throw dlErr + } + } + } + + outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 
'null'}`) + + if (del && inPath) { + await fsp.unlink(inPath) + } + return assembly + } if (singleAssembly) { // Single-assembly mode: collect file paths, then create one assembly with all inputs @@ -715,6 +792,7 @@ export default async function run( emitter.on('error', (err: Error) => { abortController.abort() + queue.clear() outputctl.error(err) reject(err) }) @@ -730,7 +808,6 @@ export default async function run( const inputPaths: string[] = [] for (const inPath of collectedPaths) { const basename = path.basename(inPath) - // Use a unique key if there are name collisions let key = basename let counter = 1 while (key in uploads) { @@ -743,195 +820,82 @@ export default async function run( outputctl.debug(`Creating single assembly with ${Object.keys(uploads).length} files`) - const singleAssemblyPromise = (async () => { - const createOptions: CreateAssemblyOptions = { - params, - signal: abortController.signal, - } - if (Object.keys(uploads).length > 0) { - createOptions.uploads = uploads - } - - const result = await client.createAssembly(createOptions) - const assemblyId = result.assembly_id - if (!assemblyId) throw new Error('No assembly_id in result') - - const assembly = await client.awaitAssemblyCompletion(assemblyId, { - signal: abortController.signal, - onAssemblyProgress: (status) => { - outputctl.debug(`Assembly status: ${status.ok}`) - }, - }) - - if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { - const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` - outputctl.error(msg) - throw new Error(msg) - } + try { + const assembly = await queue.add(async () => { + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (Object.keys(uploads).length > 0) { + createOptions.uploads = uploads + } - // Download all results - if (assembly.results && resolvedOutput != null) { - for (const [stepName, stepResults] of Object.entries(assembly.results)) { - for 
(const stepResult of stepResults) { - const resultUrl = stepResult.url - if (!resultUrl) continue - - // Determine output path - let outPath: string - if (outstat?.isDirectory()) { - outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) - } else { - outPath = resolvedOutput - } + const result = await client.createAssembly(createOptions) + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const asm = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) + + if (asm.error || (asm.ok && asm.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${asm.error || asm.message} (Status: ${asm.ok})` + outputctl.error(msg) + throw new Error(msg) + } - outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) - const [dlErr] = await tryCatch( - pipeline( - got.stream(resultUrl, { signal: abortController.signal }), - fs.createWriteStream(outPath), - ), - ) - if (dlErr) { - if (dlErr.name === 'AbortError') continue - outputctl.error(dlErr.message) - throw dlErr + // Download all results + if (asm.results && resolvedOutput != null) { + for (const [stepName, stepResults] of Object.entries(asm.results)) { + for (const stepResult of stepResults) { + const resultUrl = stepResult.url + if (!resultUrl) continue + + let outPath: string + if (outstat?.isDirectory()) { + outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) + } else { + outPath = resolvedOutput + } + + outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) + const [dlErr] = await tryCatch( + pipeline( + got.stream(resultUrl, { signal: abortController.signal }), + fs.createWriteStream(outPath), + ), + ) + if (dlErr) { + if (dlErr.name === 'AbortError') continue + outputctl.error(dlErr.message) + throw dlErr + } } } } - } - // Delete input files if requested - if (del) { - 
for (const inPath of inputPaths) { - await fsp.unlink(inPath) + // Delete input files if requested + if (del) { + for (const inPath of inputPaths) { + await fsp.unlink(inPath) + } } - } - return assembly - })() - - jobsPromise.add(singleAssemblyPromise) - const results = await jobsPromise.allSettled() - resolve({ results, hasFailures: jobsPromise.hasFailures }) - }) - } else { - // Default mode: one assembly per file with concurrency limiting - // Queue jobs and limit how many run in parallel to avoid file descriptor exhaustion - interface QueuedJob { - inPath: string | null - outPath: string | null - outMtime: Date | undefined - } - const jobQueue: QueuedJob[] = [] - let activeCount = 0 - let emitterEnded = false - let resolveWhenDone: (() => void) | null = null - - function tryProcessNext(): void { - while (activeCount < concurrency && jobQueue.length > 0) { - const queuedJob = jobQueue.shift() - if (!queuedJob) break - activeCount++ - processJob(queuedJob) - } - // Check if we're done (emitter ended, queue empty, no active jobs) - if (emitterEnded && jobQueue.length === 0 && activeCount === 0 && resolveWhenDone) { - resolveWhenDone() - } - } - - function processJob(queuedJob: QueuedJob): void { - const { inPath, outPath, outMtime } = queuedJob - outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) - - // Create fresh streams for this job - const inStream = inPath ? fs.createReadStream(inPath) : null - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - inStream?.on('error', () => {}) - const outStream = outPath ? 
(fs.createWriteStream(outPath) as OutStream) : null - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - outStream?.on('error', () => {}) - if (outStream) outStream.mtime = outMtime - - let superceded = false - if (outStream != null) - outStream.on('finish', () => { - superceded = true + return asm }) - - const createOptions: CreateAssemblyOptions = { - params, - signal: abortController.signal, - } - if (inStream != null) { - createOptions.uploads = { in: inStream } + results.push(assembly) + } catch (err) { + hasFailures = true + outputctl.error(err as Error) } - const jobPromise = (async () => { - const result = await client.createAssembly(createOptions) - if (superceded) return - - const assemblyId = result.assembly_id - if (!assemblyId) throw new Error('No assembly_id in result') - - // Use SDK's awaitAssemblyCompletion with onPoll to check for superceded jobs - const assembly = await client.awaitAssemblyCompletion(assemblyId, { - signal: abortController.signal, - onPoll: () => { - // Return false to stop polling if this job has been superceded (watch mode) - if (superceded) return false - return true - }, - onAssemblyProgress: (status) => { - outputctl.debug(`Assembly status: ${status.ok}`) - }, - }) - - // If superceded, exit early without processing results - if (superceded) return - - if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { - const msg = `Assembly failed: ${assembly.error || assembly.message} (Status: ${assembly.ok})` - outputctl.error(msg) - throw new Error(msg) - } - - if (!assembly.results) throw new Error('No results in assembly') - const resultsKeys = Object.keys(assembly.results) - const firstKey = resultsKeys[0] - if (!firstKey) throw new Error('No results in assembly') - const firstResult = assembly.results[firstKey] - if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') - const resulturl = firstResult[0].url - - if (outStream != null && resulturl && 
!superceded) { - outputctl.debug('DOWNLOADING') - const [dlErr] = await tryCatch( - pipeline(got.stream(resulturl, { signal: abortController.signal }), outStream), - ) - if (dlErr) { - if (dlErr.name !== 'AbortError') { - outputctl.error(dlErr.message) - throw dlErr - } - } - } - - outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) - - if (del && inPath) { - await fsp.unlink(inPath) - } - return assembly - })().finally(() => { - activeCount-- - tryProcessNext() - }) - - jobsPromise.add(jobPromise) - } - + resolve({ results, hasFailures }) + }) + } else { + // Default mode: one assembly per file with p-queue concurrency limiting emitter.on('job', (job: Job) => { - activeJobs.add(job) const inPath = job.in ? (((job.in as fs.ReadStream).path as string | undefined) ?? null) : null @@ -946,32 +910,32 @@ export default async function run( if (job.out != null) { job.out.destroy() } - activeJobs.delete(job) - // Queue the job metadata for later processing - jobQueue.push({ inPath, outPath, outMtime }) - tryProcessNext() + // Add job to queue - p-queue handles concurrency automatically + queue + .add(async () => { + const result = await processAssemblyJob(inPath, outPath, outMtime) + if (result !== undefined) { + results.push(result) + } + }) + .catch((err: unknown) => { + hasFailures = true + outputctl.error(err as Error) + }) }) emitter.on('error', (err: Error) => { - // Abort all in-flight createAssembly calls to ensure clean shutdown abortController.abort() - activeJobs.clear() - jobQueue.length = 0 // Clear the queue + queue.clear() outputctl.error(err) reject(err) }) emitter.on('end', async () => { - emitterEnded = true - // If there are still jobs in queue or active, wait for them - if (jobQueue.length > 0 || activeCount > 0) { - await new Promise((r) => { - resolveWhenDone = r - }) - } - const results = await jobsPromise.allSettled() - resolve({ results, hasFailures: jobsPromise.hasFailures }) + // Wait for all queued jobs to complete + await 
queue.onIdle() + resolve({ results, hasFailures }) }) } }) From d921f30d31743cf3368c96a37dd5e6b798efe91a Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 16:42:52 +0100 Subject: [PATCH 42/45] natives --- src/Transloadit.ts | 3 +- src/cli/assemblies-create.ts | 15 +------ src/cli/assemblies.ts | 3 +- test/unit/cli/JobsPromise.test.ts | 72 ------------------------------- 4 files changed, 5 insertions(+), 88 deletions(-) delete mode 100644 test/unit/cli/JobsPromise.test.ts diff --git a/src/Transloadit.ts b/src/Transloadit.ts index f360c438..a093675e 100644 --- a/src/Transloadit.ts +++ b/src/Transloadit.ts @@ -3,6 +3,7 @@ import { createHmac, randomUUID } from 'node:crypto' import { constants, createReadStream } from 'node:fs' import { access } from 'node:fs/promises' import type { Readable } from 'node:stream' +import { setTimeout as delay } from 'node:timers/promises' import debug from 'debug' import FormData from 'form-data' import type { Delays, Headers, OptionsOfJSONResponseBody, RetryOptions } from 'got' @@ -962,7 +963,7 @@ export class Transloadit { const { retryIn: retryInSec } = body.info logWarn(`Rate limit reached, retrying request in approximately ${retryInSec} seconds.`) const retryInMs = 1000 * (retryInSec * (1 + 0.1 * Math.random())) - await new Promise((resolve) => setTimeout(resolve, retryInMs)) + await delay(retryInMs) // Retry } else { throw new ApiError({ diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index 565f3902..ffc25d0e 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -103,20 +103,7 @@ async function myStat( } async function ensureDir(dir: string): Promise { - try { - await fsp.mkdir(dir) - } catch (err) { - if (!isErrnoException(err)) throw err - if (err.code === 'EEXIST') { - const stats = await fsp.stat(dir) - if (!stats.isDirectory()) throw err - return - } - if (err.code !== 'ENOENT') throw err - - await ensureDir(path.dirname(dir)) - await fsp.mkdir(dir) - } 
+ await fsp.mkdir(dir, { recursive: true }) } function dirProvider(output: string): OutstreamProvider { diff --git a/src/cli/assemblies.ts b/src/cli/assemblies.ts index cf541c34..225efa89 100644 --- a/src/cli/assemblies.ts +++ b/src/cli/assemblies.ts @@ -1,3 +1,4 @@ +import { setTimeout as delay } from 'node:timers/promises' import { z } from 'zod' import { tryCatch } from '../alphalib/tryCatch.ts' import type { Steps } from '../alphalib/types/template.ts' @@ -80,7 +81,7 @@ export async function get( { assemblies }: AssemblyGetOptions, ): Promise { for (const assembly of assemblies) { - await new Promise((resolve) => setTimeout(resolve, 1000)) + await delay(1000) const [err, result] = await tryCatch(client.getAssembly(assembly)) if (err) { output.error(formatAPIError(err)) diff --git a/test/unit/cli/JobsPromise.test.ts b/test/unit/cli/JobsPromise.test.ts deleted file mode 100644 index d86d449c..00000000 --- a/test/unit/cli/JobsPromise.test.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { describe, expect, it, vi } from 'vitest' -import JobsPromise from '../../../src/cli/JobsPromise.ts' - -describe('JobsPromise', () => { - it('should call error handler when promise rejects', async () => { - const jobs = new JobsPromise() - const errorHandler = vi.fn() - jobs.setErrorHandler(errorHandler) - - const error = new Error('test error') - jobs.add(Promise.reject(error)) - - await jobs.allSettled() - - expect(errorHandler).toHaveBeenCalledWith(error) - }) - - it('should collect fulfilled values from allSettled', async () => { - const jobs = new JobsPromise() - jobs.setErrorHandler(() => {}) - - jobs.add(Promise.resolve('a')) - jobs.add(Promise.resolve('b')) - jobs.add(Promise.reject(new Error('ignored'))) - - const results = await jobs.allSettled() - - expect(results).toContain('a') - expect(results).toContain('b') - expect(results).toHaveLength(2) - }) - - it('should throw if error handler is not set and promise rejects', () => { - const jobs = new JobsPromise() - // 
Intentionally NOT setting error handler - - // Create a promise that we'll handle to avoid unhandled rejection - const rejectingPromise = Promise.reject(new Error('test')) - rejectingPromise.catch(() => {}) // Prevent unhandled rejection warning - - // This should throw because no error handler is set - expect(() => jobs.add(rejectingPromise)).toThrow( - 'JobsPromise: error handler must be set before adding promises', - ) - }) - - it('should track hasFailures when promise rejects', async () => { - const jobs = new JobsPromise() - jobs.setErrorHandler(() => {}) - - expect(jobs.hasFailures).toBe(false) - - jobs.add(Promise.resolve('ok')) - jobs.add(Promise.reject(new Error('fail'))) - - await jobs.allSettled() - - expect(jobs.hasFailures).toBe(true) - }) - - it('should have hasFailures false when all succeed', async () => { - const jobs = new JobsPromise() - jobs.setErrorHandler(() => {}) - - jobs.add(Promise.resolve('a')) - jobs.add(Promise.resolve('b')) - - await jobs.allSettled() - - expect(jobs.hasFailures).toBe(false) - }) -}) From 4299b8dfae83986788c175cdd210396567f00d99 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 16:45:56 +0100 Subject: [PATCH 43/45] no esnureDir --- src/cli/assemblies-create.ts | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts index ffc25d0e..5fd324bb 100644 --- a/src/cli/assemblies-create.ts +++ b/src/cli/assemblies-create.ts @@ -102,10 +102,6 @@ async function myStat( return await fsp.stat(filepath) } -async function ensureDir(dir: string): Promise { - await fsp.mkdir(dir, { recursive: true }) -} - function dirProvider(output: string): OutstreamProvider { return async (inpath, indir = process.cwd()) => { if (inpath == null || inpath === '-') { @@ -117,7 +113,7 @@ function dirProvider(output: string): OutstreamProvider { const outpath = path.join(output, relpath) const outdir = path.dirname(outpath) - await ensureDir(outdir) + 
await fsp.mkdir(outdir, { recursive: true }) const [, stats] = await tryCatch(fsp.stat(outpath)) const mtime = stats?.mtime ?? new Date(0) const outstream = fs.createWriteStream(outpath) as OutStream @@ -130,7 +126,7 @@ function dirProvider(output: string): OutstreamProvider { } function fileProvider(output: string): OutstreamProvider { - const dirExistsP = ensureDir(path.dirname(output)) + const dirExistsP = fsp.mkdir(path.dirname(output), { recursive: true }) return async (_inpath) => { await dirExistsP if (output === '-') return process.stdout as OutStream From 9cceb6f07a666ba6eb79d403ecf56b799d3b8a32 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 17:43:02 +0100 Subject: [PATCH 44/45] unify cli files --- src/cli/assemblies-create.ts | 925 -------------------- src/cli/assemblies.ts | 149 ---- src/cli/bills.ts | 36 - src/cli/commands/assemblies.ts | 1084 +++++++++++++++++++++++- src/cli/commands/bills.ts | 43 +- src/cli/commands/notifications.ts | 28 +- src/cli/commands/templates.ts | 344 +++++++- src/cli/notifications.ts | 21 - src/cli/templates.ts | 330 -------- test/e2e/cli/assemblies-create.test.ts | 2 +- test/e2e/cli/assemblies-list.test.ts | 2 +- test/e2e/cli/assemblies.test.ts | 2 +- test/e2e/cli/bills.test.ts | 2 +- test/e2e/cli/templates.test.ts | 2 +- 14 files changed, 1486 insertions(+), 1484 deletions(-) delete mode 100644 src/cli/assemblies-create.ts delete mode 100644 src/cli/assemblies.ts delete mode 100644 src/cli/bills.ts delete mode 100644 src/cli/notifications.ts delete mode 100644 src/cli/templates.ts diff --git a/src/cli/assemblies-create.ts b/src/cli/assemblies-create.ts deleted file mode 100644 index 5fd324bb..00000000 --- a/src/cli/assemblies-create.ts +++ /dev/null @@ -1,925 +0,0 @@ -import EventEmitter from 'node:events' -import fs from 'node:fs' -import fsp from 'node:fs/promises' -import path from 'node:path' -import process from 'node:process' -import type { Readable, Writable } from 'node:stream' -import { 
pipeline } from 'node:stream/promises' -import tty from 'node:tty' -import { promisify } from 'node:util' -import got from 'got' -import PQueue from 'p-queue' -import { tryCatch } from '../alphalib/tryCatch.ts' -import type { StepsInput } from '../alphalib/types/template.ts' -import type { CreateAssemblyParams } from '../apiTypes.ts' -import type { CreateAssemblyOptions, Transloadit } from '../Transloadit.ts' -import type { IOutputCtl } from './OutputCtl.ts' -import { isErrnoException } from './types.ts' - -interface NodeWatcher { - on(event: 'error', listener: (err: Error) => void): void - on(event: 'close', listener: () => void): void - on(event: 'change', listener: (evt: string, filename: string) => void): void - on(event: string, listener: (...args: unknown[]) => void): void - close(): void -} - -type NodeWatchFn = (path: string, options?: { recursive?: boolean }) => NodeWatcher - -let nodeWatch: NodeWatchFn | undefined - -async function getNodeWatch(): Promise { - if (!nodeWatch) { - const mod = (await import('node-watch')) as unknown as { default: NodeWatchFn } - nodeWatch = mod.default - } - return nodeWatch -} - -// workaround for determining mime-type of stdin -const stdinWithPath = process.stdin as unknown as { path: string } -stdinWithPath.path = '/dev/stdin' - -interface OutStream extends Writable { - path?: string - mtime?: Date -} - -interface Job { - in: Readable | null - out: OutStream | null -} - -type OutstreamProvider = (inpath: string | null, indir?: string) => Promise - -interface StreamRegistry { - [key: string]: OutStream | undefined -} - -interface JobEmitterOptions { - recursive?: boolean - outstreamProvider: OutstreamProvider - streamRegistry: StreamRegistry - watch?: boolean - reprocessStale?: boolean -} - -interface ReaddirJobEmitterOptions { - dir: string - streamRegistry: StreamRegistry - recursive?: boolean - outstreamProvider: OutstreamProvider - topdir?: string -} - -interface SingleJobEmitterOptions { - file: string - 
streamRegistry: StreamRegistry - outstreamProvider: OutstreamProvider -} - -interface WatchJobEmitterOptions { - file: string - streamRegistry: StreamRegistry - recursive?: boolean - outstreamProvider: OutstreamProvider -} - -interface StatLike { - isDirectory(): boolean -} - -const fstatAsync = promisify(fs.fstat) - -async function myStat( - stdioStream: NodeJS.ReadStream | NodeJS.WriteStream, - filepath: string, -): Promise { - if (filepath === '-') { - const stream = stdioStream as NodeJS.ReadStream & { fd: number } - return await fstatAsync(stream.fd) - } - return await fsp.stat(filepath) -} - -function dirProvider(output: string): OutstreamProvider { - return async (inpath, indir = process.cwd()) => { - if (inpath == null || inpath === '-') { - throw new Error('You must provide an input to output to a directory') - } - - let relpath = path.relative(indir, inpath) - relpath = relpath.replace(/^(\.\.\/)+/, '') - const outpath = path.join(output, relpath) - const outdir = path.dirname(outpath) - - await fsp.mkdir(outdir, { recursive: true }) - const [, stats] = await tryCatch(fsp.stat(outpath)) - const mtime = stats?.mtime ?? new Date(0) - const outstream = fs.createWriteStream(outpath) as OutStream - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - outstream.on('error', () => {}) - outstream.mtime = mtime - return outstream - } -} - -function fileProvider(output: string): OutstreamProvider { - const dirExistsP = fsp.mkdir(path.dirname(output), { recursive: true }) - return async (_inpath) => { - await dirExistsP - if (output === '-') return process.stdout as OutStream - - const [, stats] = await tryCatch(fsp.stat(output)) - const mtime = stats?.mtime ?? 
new Date(0) - const outstream = fs.createWriteStream(output) as OutStream - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - outstream.on('error', () => {}) - outstream.mtime = mtime - return outstream - } -} - -function nullProvider(): OutstreamProvider { - return async (_inpath) => null -} - -class MyEventEmitter extends EventEmitter { - protected hasEnded: boolean - - constructor() { - super() - this.hasEnded = false - } - - override emit(event: string | symbol, ...args: unknown[]): boolean { - if (this.hasEnded) return false - if (event === 'end' || event === 'error') { - this.hasEnded = true - return super.emit(event, ...args) - } - return super.emit(event, ...args) - } -} - -class ReaddirJobEmitter extends MyEventEmitter { - constructor({ - dir, - streamRegistry, - recursive, - outstreamProvider, - topdir = dir, - }: ReaddirJobEmitterOptions) { - super() - - process.nextTick(() => { - this.processDirectory({ dir, streamRegistry, recursive, outstreamProvider, topdir }).catch( - (err) => { - this.emit('error', err) - }, - ) - }) - } - - private async processDirectory({ - dir, - streamRegistry, - recursive, - outstreamProvider, - topdir, - }: ReaddirJobEmitterOptions & { topdir: string }): Promise { - const files = await fsp.readdir(dir) - - const pendingOperations: Promise[] = [] - - for (const filename of files) { - const file = path.normalize(path.join(dir, filename)) - pendingOperations.push( - this.processFile({ file, streamRegistry, recursive, outstreamProvider, topdir }), - ) - } - - await Promise.all(pendingOperations) - this.emit('end') - } - - private async processFile({ - file, - streamRegistry, - recursive = false, - outstreamProvider, - topdir, - }: { - file: string - streamRegistry: StreamRegistry - recursive?: boolean - outstreamProvider: OutstreamProvider - topdir: string - }): Promise { - const stats = await fsp.stat(file) - - if 
(stats.isDirectory()) { - if (recursive) { - await new Promise((resolve, reject) => { - const subdirEmitter = new ReaddirJobEmitter({ - dir: file, - streamRegistry, - recursive, - outstreamProvider, - topdir, - }) - subdirEmitter.on('job', (job: Job) => this.emit('job', job)) - subdirEmitter.on('error', (error: Error) => reject(error)) - subdirEmitter.on('end', () => resolve()) - }) - } - } else { - const existing = streamRegistry[file] - if (existing) existing.end() - const outstream = await outstreamProvider(file, topdir) - streamRegistry[file] = outstream ?? undefined - const instream = fs.createReadStream(file) - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - instream.on('error', () => {}) - this.emit('job', { in: instream, out: outstream }) - } - } -} - -class SingleJobEmitter extends MyEventEmitter { - constructor({ file, streamRegistry, outstreamProvider }: SingleJobEmitterOptions) { - super() - - const normalizedFile = path.normalize(file) - const existing = streamRegistry[normalizedFile] - if (existing) existing.end() - outstreamProvider(normalizedFile).then((outstream) => { - streamRegistry[normalizedFile] = outstream ?? 
undefined - - let instream: Readable | null - if (normalizedFile === '-') { - if (tty.isatty(process.stdin.fd)) { - instream = null - } else { - instream = process.stdin - } - } else { - instream = fs.createReadStream(normalizedFile) - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - instream.on('error', () => {}) - } - - process.nextTick(() => { - this.emit('job', { in: instream, out: outstream }) - this.emit('end') - }) - }) - } -} - -class InputlessJobEmitter extends MyEventEmitter { - constructor({ - outstreamProvider, - }: { streamRegistry: StreamRegistry; outstreamProvider: OutstreamProvider }) { - super() - - process.nextTick(() => { - outstreamProvider(null).then((outstream) => { - try { - this.emit('job', { in: null, out: outstream }) - } catch (err) { - this.emit('error', err) - } - - this.emit('end') - }) - }) - } -} - -class NullJobEmitter extends MyEventEmitter { - constructor() { - super() - process.nextTick(() => this.emit('end')) - } -} - -class WatchJobEmitter extends MyEventEmitter { - private watcher: NodeWatcher | null = null - - constructor({ file, streamRegistry, recursive, outstreamProvider }: WatchJobEmitterOptions) { - super() - - this.init({ file, streamRegistry, recursive, outstreamProvider }).catch((err) => { - this.emit('error', err) - }) - - // Clean up watcher on process exit signals - const cleanup = () => this.close() - process.once('SIGINT', cleanup) - process.once('SIGTERM', cleanup) - } - - /** Close the file watcher and release resources */ - close(): void { - if (this.watcher) { - this.watcher.close() - this.watcher = null - } - } - - private async init({ - file, - streamRegistry, - recursive, - outstreamProvider, - }: WatchJobEmitterOptions): Promise { - const stats = await fsp.stat(file) - const topdir = stats.isDirectory() ? 
file : undefined - - const watchFn = await getNodeWatch() - this.watcher = watchFn(file, { recursive }) - - this.watcher.on('error', (err: Error) => { - this.close() - this.emit('error', err) - }) - this.watcher.on('close', () => this.emit('end')) - this.watcher.on('change', (_evt: string, filename: string) => { - const normalizedFile = path.normalize(filename) - this.handleChange(normalizedFile, topdir, streamRegistry, outstreamProvider).catch((err) => { - this.emit('error', err) - }) - }) - } - - private async handleChange( - normalizedFile: string, - topdir: string | undefined, - streamRegistry: StreamRegistry, - outstreamProvider: OutstreamProvider, - ): Promise { - const stats = await fsp.stat(normalizedFile) - if (stats.isDirectory()) return - - const existing = streamRegistry[normalizedFile] - if (existing) existing.end() - - const outstream = await outstreamProvider(normalizedFile, topdir) - streamRegistry[normalizedFile] = outstream ?? undefined - - const instream = fs.createReadStream(normalizedFile) - // Attach a no-op error handler to prevent unhandled errors if stream is destroyed - // before being consumed (e.g., due to output collision detection) - instream.on('error', () => {}) - this.emit('job', { in: instream, out: outstream }) - } -} - -class MergedJobEmitter extends MyEventEmitter { - constructor(...jobEmitters: MyEventEmitter[]) { - super() - - let ncomplete = 0 - - for (const jobEmitter of jobEmitters) { - jobEmitter.on('error', (err: Error) => this.emit('error', err)) - jobEmitter.on('job', (job: Job) => this.emit('job', job)) - jobEmitter.on('end', () => { - if (++ncomplete === jobEmitters.length) this.emit('end') - }) - } - - if (jobEmitters.length === 0) { - this.emit('end') - } - } -} - -class ConcattedJobEmitter extends MyEventEmitter { - constructor(emitterFn: () => MyEventEmitter, ...emitterFns: (() => MyEventEmitter)[]) { - super() - - const emitter = emitterFn() - - emitter.on('error', (err: Error) => this.emit('error', err)) - 
emitter.on('job', (job: Job) => this.emit('job', job)) - - if (emitterFns.length === 0) { - emitter.on('end', () => this.emit('end')) - } else { - emitter.on('end', () => { - const firstFn = emitterFns[0] - if (!firstFn) { - this.emit('end') - return - } - const restEmitter = new ConcattedJobEmitter(firstFn, ...emitterFns.slice(1)) - restEmitter.on('error', (err: Error) => this.emit('error', err)) - restEmitter.on('job', (job: Job) => this.emit('job', job)) - restEmitter.on('end', () => this.emit('end')) - }) - } - } -} - -function detectConflicts(jobEmitter: EventEmitter): MyEventEmitter { - const emitter = new MyEventEmitter() - const outfileAssociations: Record = {} - - jobEmitter.on('end', () => emitter.emit('end')) - jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) - jobEmitter.on('job', (job: Job) => { - if (job.in == null || job.out == null) { - emitter.emit('job', job) - return - } - const inPath = (job.in as fs.ReadStream).path as string - const outPath = job.out.path as string - if (Object.hasOwn(outfileAssociations, outPath) && outfileAssociations[outPath] !== inPath) { - emitter.emit( - 'error', - new Error(`Output collision between '${inPath}' and '${outfileAssociations[outPath]}'`), - ) - } else { - outfileAssociations[outPath] = inPath - emitter.emit('job', job) - } - }) - - return emitter -} - -function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { - const emitter = new MyEventEmitter() - const pendingChecks: Promise[] = [] - - jobEmitter.on('end', () => Promise.all(pendingChecks).then(() => emitter.emit('end'))) - jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) - jobEmitter.on('job', (job: Job) => { - if (job.in == null || job.out == null) { - emitter.emit('job', job) - return - } - - const inPath = (job.in as fs.ReadStream).path as string - const checkPromise = fsp - .stat(inPath) - .then((stats) => { - const inM = stats.mtime - const outM = job.out?.mtime ?? 
new Date(0) - - if (outM <= inM) emitter.emit('job', job) - }) - .catch(() => { - emitter.emit('job', job) - }) - pendingChecks.push(checkPromise) - }) - - return emitter -} - -function makeJobEmitter( - inputs: string[], - { - recursive, - outstreamProvider, - streamRegistry, - watch: watchOption, - reprocessStale, - }: JobEmitterOptions, -): MyEventEmitter { - const emitter = new EventEmitter() - - const emitterFns: (() => MyEventEmitter)[] = [] - const watcherFns: (() => MyEventEmitter)[] = [] - - async function processInputs(): Promise { - for (const input of inputs) { - if (input === '-') { - emitterFns.push( - () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), - ) - watcherFns.push(() => new NullJobEmitter()) - } else { - const stats = await fsp.stat(input) - if (stats.isDirectory()) { - emitterFns.push( - () => - new ReaddirJobEmitter({ dir: input, recursive, outstreamProvider, streamRegistry }), - ) - watcherFns.push( - () => - new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), - ) - } else { - emitterFns.push( - () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), - ) - watcherFns.push( - () => - new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), - ) - } - } - } - - if (inputs.length === 0) { - emitterFns.push(() => new InputlessJobEmitter({ outstreamProvider, streamRegistry })) - } - - startEmitting() - } - - function startEmitting(): void { - let source: MyEventEmitter = new MergedJobEmitter(...emitterFns.map((f) => f())) - - if (watchOption) { - source = new ConcattedJobEmitter( - () => source, - () => new MergedJobEmitter(...watcherFns.map((f) => f())), - ) - } - - source.on('job', (job: Job) => emitter.emit('job', job)) - source.on('error', (err: Error) => emitter.emit('error', err)) - source.on('end', () => emitter.emit('end')) - } - - processInputs().catch((err) => { - emitter.emit('error', err) - }) - - const stalefilter = 
reprocessStale ? (x: EventEmitter) => x as MyEventEmitter : dismissStaleJobs - return stalefilter(detectConflicts(emitter)) -} - -export interface AssembliesCreateOptions { - steps?: string - template?: string - fields?: Record - watch?: boolean - recursive?: boolean - inputs: string[] - output?: string | null - del?: boolean - reprocessStale?: boolean - singleAssembly?: boolean - concurrency?: number -} - -const DEFAULT_CONCURRENCY = 5 - -export default async function run( - outputctl: IOutputCtl, - client: Transloadit, - { - steps, - template, - fields, - watch: watchOption, - recursive, - inputs, - output, - del, - reprocessStale, - singleAssembly, - concurrency = DEFAULT_CONCURRENCY, - }: AssembliesCreateOptions, -): Promise<{ results: unknown[]; hasFailures: boolean }> { - // Quick fix for https://github.com/transloadit/transloadify/issues/13 - // Only default to stdout when output is undefined (not provided), not when explicitly null - let resolvedOutput = output - if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' - - // Read steps file async before entering the Promise constructor - // We use StepsInput (the input type) rather than Steps (the transformed output type) - // to avoid zod adding default values that the API may reject - let stepsData: StepsInput | undefined - if (steps) { - const stepsContent = await fsp.readFile(steps, 'utf8') - const parsed: unknown = JSON.parse(stepsContent) - // Basic structural validation: must be an object with step names as keys - if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { - throw new Error('Invalid steps format: expected an object with step names as keys') - } - // Validate each step has a robot field - for (const [stepName, step] of Object.entries(parsed)) { - if (step == null || typeof step !== 'object' || Array.isArray(step)) { - throw new Error(`Invalid steps format: step '${stepName}' must be an object`) - } - if (!('robot' in step) || typeof (step as 
Record).robot !== 'string') { - throw new Error( - `Invalid steps format: step '${stepName}' must have a 'robot' string property`, - ) - } - } - stepsData = parsed as StepsInput - } - - // Determine output stat async before entering the Promise constructor - let outstat: StatLike | undefined - if (resolvedOutput != null) { - const [err, stat] = await tryCatch(myStat(process.stdout, resolvedOutput)) - if (err && (!isErrnoException(err) || err.code !== 'ENOENT')) throw err - outstat = stat ?? { isDirectory: () => false } - - if (!outstat.isDirectory() && inputs.length !== 0) { - const firstInput = inputs[0] - if (firstInput) { - const firstInputStat = await myStat(process.stdin, firstInput) - if (inputs.length > 1 || firstInputStat.isDirectory()) { - const msg = 'Output must be a directory when specifying multiple inputs' - outputctl.error(msg) - throw new Error(msg) - } - } - } - } - - return new Promise((resolve, reject) => { - const params: CreateAssemblyParams = ( - stepsData ? { steps: stepsData as CreateAssemblyParams['steps'] } : { template_id: template } - ) as CreateAssemblyParams - if (fields) { - params.fields = fields - } - - const outstreamProvider: OutstreamProvider = - resolvedOutput == null - ? nullProvider() - : outstat?.isDirectory() - ? 
dirProvider(resolvedOutput) - : fileProvider(resolvedOutput) - const streamRegistry: StreamRegistry = {} - - const emitter = makeJobEmitter(inputs, { - recursive, - watch: watchOption, - outstreamProvider, - streamRegistry, - reprocessStale, - }) - - // Use p-queue for concurrency management - const queue = new PQueue({ concurrency }) - const results: unknown[] = [] - let hasFailures = false - // AbortController to cancel all in-flight createAssembly calls when an error occurs - const abortController = new AbortController() - - // Helper to process a single assembly job - async function processAssemblyJob( - inPath: string | null, - outPath: string | null, - outMtime: Date | undefined, - ): Promise { - outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) - - // Create fresh streams for this job - const inStream = inPath ? fs.createReadStream(inPath) : null - inStream?.on('error', () => {}) - const outStream = outPath ? (fs.createWriteStream(outPath) as OutStream) : null - outStream?.on('error', () => {}) - if (outStream) outStream.mtime = outMtime - - let superceded = false - if (outStream != null) { - outStream.on('finish', () => { - superceded = true - }) - } - - const createOptions: CreateAssemblyOptions = { - params, - signal: abortController.signal, - } - if (inStream != null) { - createOptions.uploads = { in: inStream } - } - - const result = await client.createAssembly(createOptions) - if (superceded) return undefined - - const assemblyId = result.assembly_id - if (!assemblyId) throw new Error('No assembly_id in result') - - const assembly = await client.awaitAssemblyCompletion(assemblyId, { - signal: abortController.signal, - onPoll: () => { - if (superceded) return false - return true - }, - onAssemblyProgress: (status) => { - outputctl.debug(`Assembly status: ${status.ok}`) - }, - }) - - if (superceded) return undefined - - if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { - const msg = `Assembly failed: 
${assembly.error || assembly.message} (Status: ${assembly.ok})` - outputctl.error(msg) - throw new Error(msg) - } - - if (!assembly.results) throw new Error('No results in assembly') - const resultsKeys = Object.keys(assembly.results) - const firstKey = resultsKeys[0] - if (!firstKey) throw new Error('No results in assembly') - const firstResult = assembly.results[firstKey] - if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') - const resulturl = firstResult[0].url - - if (outStream != null && resulturl && !superceded) { - outputctl.debug('DOWNLOADING') - const [dlErr] = await tryCatch( - pipeline(got.stream(resulturl, { signal: abortController.signal }), outStream), - ) - if (dlErr) { - if (dlErr.name !== 'AbortError') { - outputctl.error(dlErr.message) - throw dlErr - } - } - } - - outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) - - if (del && inPath) { - await fsp.unlink(inPath) - } - return assembly - } - - if (singleAssembly) { - // Single-assembly mode: collect file paths, then create one assembly with all inputs - // We close streams immediately to avoid exhausting file descriptors with many files - const collectedPaths: string[] = [] - - emitter.on('job', (job: Job) => { - if (job.in != null) { - const inPath = (job.in as fs.ReadStream).path as string - outputctl.debug(`COLLECTING JOB ${inPath}`) - collectedPaths.push(inPath) - // Close the stream immediately to avoid file descriptor exhaustion - ;(job.in as fs.ReadStream).destroy() - outputctl.debug(`STREAM CLOSED ${inPath}`) - } - }) - - emitter.on('error', (err: Error) => { - abortController.abort() - queue.clear() - outputctl.error(err) - reject(err) - }) - - emitter.on('end', async () => { - if (collectedPaths.length === 0) { - resolve({ results: [], hasFailures: false }) - return - } - - // Build uploads object, creating fresh streams for each file - const uploads: Record = {} - const inputPaths: string[] = [] - for (const inPath of collectedPaths) { - 
const basename = path.basename(inPath) - let key = basename - let counter = 1 - while (key in uploads) { - key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` - counter++ - } - uploads[key] = fs.createReadStream(inPath) - inputPaths.push(inPath) - } - - outputctl.debug(`Creating single assembly with ${Object.keys(uploads).length} files`) - - try { - const assembly = await queue.add(async () => { - const createOptions: CreateAssemblyOptions = { - params, - signal: abortController.signal, - } - if (Object.keys(uploads).length > 0) { - createOptions.uploads = uploads - } - - const result = await client.createAssembly(createOptions) - const assemblyId = result.assembly_id - if (!assemblyId) throw new Error('No assembly_id in result') - - const asm = await client.awaitAssemblyCompletion(assemblyId, { - signal: abortController.signal, - onAssemblyProgress: (status) => { - outputctl.debug(`Assembly status: ${status.ok}`) - }, - }) - - if (asm.error || (asm.ok && asm.ok !== 'ASSEMBLY_COMPLETED')) { - const msg = `Assembly failed: ${asm.error || asm.message} (Status: ${asm.ok})` - outputctl.error(msg) - throw new Error(msg) - } - - // Download all results - if (asm.results && resolvedOutput != null) { - for (const [stepName, stepResults] of Object.entries(asm.results)) { - for (const stepResult of stepResults) { - const resultUrl = stepResult.url - if (!resultUrl) continue - - let outPath: string - if (outstat?.isDirectory()) { - outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) - } else { - outPath = resolvedOutput - } - - outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) - const [dlErr] = await tryCatch( - pipeline( - got.stream(resultUrl, { signal: abortController.signal }), - fs.createWriteStream(outPath), - ), - ) - if (dlErr) { - if (dlErr.name === 'AbortError') continue - outputctl.error(dlErr.message) - throw dlErr - } - } - } - } - - // Delete input files if requested - if (del) { - for (const inPath 
of inputPaths) { - await fsp.unlink(inPath) - } - } - return asm - }) - results.push(assembly) - } catch (err) { - hasFailures = true - outputctl.error(err as Error) - } - - resolve({ results, hasFailures }) - }) - } else { - // Default mode: one assembly per file with p-queue concurrency limiting - emitter.on('job', (job: Job) => { - const inPath = job.in - ? (((job.in as fs.ReadStream).path as string | undefined) ?? null) - : null - const outPath = job.out?.path ?? null - const outMtime = job.out?.mtime - outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) - - // Close the original streams immediately - we'll create fresh ones when processing - if (job.in != null) { - ;(job.in as fs.ReadStream).destroy() - } - if (job.out != null) { - job.out.destroy() - } - - // Add job to queue - p-queue handles concurrency automatically - queue - .add(async () => { - const result = await processAssemblyJob(inPath, outPath, outMtime) - if (result !== undefined) { - results.push(result) - } - }) - .catch((err: unknown) => { - hasFailures = true - outputctl.error(err as Error) - }) - }) - - emitter.on('error', (err: Error) => { - abortController.abort() - queue.clear() - outputctl.error(err) - reject(err) - }) - - emitter.on('end', async () => { - // Wait for all queued jobs to complete - await queue.onIdle() - resolve({ results, hasFailures }) - }) - } - }) -} diff --git a/src/cli/assemblies.ts b/src/cli/assemblies.ts deleted file mode 100644 index 225efa89..00000000 --- a/src/cli/assemblies.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { setTimeout as delay } from 'node:timers/promises' -import { z } from 'zod' -import { tryCatch } from '../alphalib/tryCatch.ts' -import type { Steps } from '../alphalib/types/template.ts' -import { stepsSchema } from '../alphalib/types/template.ts' -import type { ReplayAssemblyParams } from '../apiTypes.ts' -import type { Transloadit } from '../Transloadit.ts' -import assembliesCreate from './assemblies-create.ts' -import { 
createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' -import type { IOutputCtl } from './OutputCtl.ts' -import { ensureError } from './types.ts' - -export const create = assembliesCreate - -export interface AssemblyListOptions { - before?: string - after?: string - fields?: string[] - keywords?: string[] - pagesize?: number -} - -export interface AssemblyGetOptions { - assemblies: string[] -} - -export interface AssemblyDeleteOptions { - assemblies: string[] -} - -export interface AssemblyReplayOptions { - fields?: Record - reparse?: boolean - steps?: string - notify_url?: string - assemblies: string[] -} - -const AssemblySchema = z.object({ - id: z.string(), -}) - -export function list( - output: IOutputCtl, - client: Transloadit, - { before, after, fields, keywords }: AssemblyListOptions, -): Promise { - const assemblies = client.streamAssemblies({ - fromdate: after, - todate: before, - keywords, - }) - - assemblies.on('readable', () => { - const assembly: unknown = assemblies.read() - if (assembly == null) return - - const parsed = AssemblySchema.safeParse(assembly) - if (!parsed.success) return - - if (fields == null) { - output.print(parsed.data.id, assembly) - } else { - const assemblyRecord = assembly as Record - output.print(fields.map((field) => assemblyRecord[field]).join(' '), assembly) - } - }) - - return new Promise((resolve) => { - assemblies.on('end', resolve) - assemblies.on('error', (err: unknown) => { - output.error(formatAPIError(err)) - resolve() - }) - }) -} - -export async function get( - output: IOutputCtl, - client: Transloadit, - { assemblies }: AssemblyGetOptions, -): Promise { - for (const assembly of assemblies) { - await delay(1000) - const [err, result] = await tryCatch(client.getAssembly(assembly)) - if (err) { - output.error(formatAPIError(err)) - throw ensureError(err) - } - output.print(result, result) - } -} - -async function _delete( - output: IOutputCtl, - client: Transloadit, - { assemblies }: 
AssemblyDeleteOptions, -): Promise { - const promises = assemblies.map(async (assembly) => { - const [err] = await tryCatch(client.cancelAssembly(assembly)) - if (err) { - output.error(formatAPIError(err)) - } - }) - await Promise.all(promises) -} - -export { _delete as delete } - -export async function replay( - output: IOutputCtl, - client: Transloadit, - { fields, reparse, steps, notify_url, assemblies }: AssemblyReplayOptions, -): Promise { - if (steps) { - try { - const buf = await streamToBuffer(createReadStream(steps)) - const parsed: unknown = JSON.parse(buf.toString()) - const validated = stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid steps format: ${validated.error.message}`) - } - await apiCall(validated.data) - } catch (err) { - const error = ensureError(err) - output.error(error.message) - } - } else { - await apiCall() - } - - async function apiCall(stepsOverride?: Steps): Promise { - const promises = assemblies.map(async (assembly) => { - const [err] = await tryCatch( - client.replayAssembly(assembly, { - reparse_template: reparse ? 
1 : 0, - fields, - notify_url, - // Steps (validated) is assignable to StepsInput at runtime; cast for TS - steps: stepsOverride as ReplayAssemblyParams['steps'], - }), - ) - if (err) { - output.error(formatAPIError(err)) - } - }) - await Promise.all(promises) - } -} diff --git a/src/cli/bills.ts b/src/cli/bills.ts deleted file mode 100644 index 5a0a0a88..00000000 --- a/src/cli/bills.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { z } from 'zod' -import { tryCatch } from '../alphalib/tryCatch.ts' -import type { Transloadit } from '../Transloadit.ts' -import { formatAPIError } from './helpers.ts' -import type { IOutputCtl } from './OutputCtl.ts' - -export interface BillsGetOptions { - months: string[] -} - -const BillResponseSchema = z.object({ - total: z.number(), -}) - -export async function get( - output: IOutputCtl, - client: Transloadit, - { months }: BillsGetOptions, -): Promise { - const requests = months.map((month) => client.getBill(month)) - - const [err, results] = await tryCatch(Promise.all(requests)) - if (err) { - output.error(formatAPIError(err)) - return - } - - for (const result of results) { - const parsed = BillResponseSchema.safeParse(result) - if (parsed.success) { - output.print(`$${parsed.data.total}`, result) - } else { - output.print('Unable to parse bill response', result) - } - } -} diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts index af724a4c..0a0e307d 100644 --- a/src/cli/commands/assemblies.ts +++ b/src/cli/commands/assemblies.ts @@ -1,10 +1,1080 @@ +import EventEmitter from 'node:events' +import fs from 'node:fs' +import fsp from 'node:fs/promises' +import path from 'node:path' import process from 'node:process' +import type { Readable, Writable } from 'node:stream' +import { pipeline } from 'node:stream/promises' +import { setTimeout as delay } from 'node:timers/promises' +import tty from 'node:tty' +import { promisify } from 'node:util' import { Command, Option } from 'clipanion' +import got from 'got' 
+import PQueue from 'p-queue' import * as t from 'typanion' -import * as assemblies from '../assemblies.ts' -import assembliesCreate from '../assemblies-create.ts' +import { z } from 'zod' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Steps, StepsInput } from '../../alphalib/types/template.ts' +import { stepsSchema } from '../../alphalib/types/template.ts' +import type { CreateAssemblyParams, ReplayAssemblyParams } from '../../apiTypes.ts' +import type { CreateAssemblyOptions, Transloadit } from '../../Transloadit.ts' +import { createReadStream, formatAPIError, streamToBuffer } from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import { ensureError, isErrnoException } from '../types.ts' import { AuthenticatedCommand } from './BaseCommand.ts' + +// --- From assemblies.ts: Schemas and interfaces --- +export interface AssemblyListOptions { + before?: string + after?: string + fields?: string[] + keywords?: string[] + pagesize?: number +} + +export interface AssemblyGetOptions { + assemblies: string[] +} + +export interface AssemblyDeleteOptions { + assemblies: string[] +} + +export interface AssemblyReplayOptions { + fields?: Record + reparse?: boolean + steps?: string + notify_url?: string + assemblies: string[] +} + +const AssemblySchema = z.object({ + id: z.string(), +}) + +// --- Business logic functions (from assemblies.ts) --- + +export function list( + output: IOutputCtl, + client: Transloadit, + { before, after, fields, keywords }: AssemblyListOptions, +): Promise { + const assemblies = client.streamAssemblies({ + fromdate: after, + todate: before, + keywords, + }) + + assemblies.on('readable', () => { + const assembly: unknown = assemblies.read() + if (assembly == null) return + + const parsed = AssemblySchema.safeParse(assembly) + if (!parsed.success) return + + if (fields == null) { + output.print(parsed.data.id, assembly) + } else { + const assemblyRecord = assembly as Record + output.print(fields.map((field) 
=> assemblyRecord[field]).join(' '), assembly) + } + }) + + return new Promise((resolve) => { + assemblies.on('end', resolve) + assemblies.on('error', (err: unknown) => { + output.error(formatAPIError(err)) + resolve() + }) + }) +} + +export async function get( + output: IOutputCtl, + client: Transloadit, + { assemblies }: AssemblyGetOptions, +): Promise { + for (const assembly of assemblies) { + await delay(1000) + const [err, result] = await tryCatch(client.getAssembly(assembly)) + if (err) { + output.error(formatAPIError(err)) + throw ensureError(err) + } + output.print(result, result) + } +} + +async function deleteAssemblies( + output: IOutputCtl, + client: Transloadit, + { assemblies }: AssemblyDeleteOptions, +): Promise { + const promises = assemblies.map(async (assembly) => { + const [err] = await tryCatch(client.cancelAssembly(assembly)) + if (err) { + output.error(formatAPIError(err)) + } + }) + await Promise.all(promises) +} + +// Export with `delete` alias for tests (can't use `delete` as function name) +export { deleteAssemblies as delete } + +export async function replay( + output: IOutputCtl, + client: Transloadit, + { fields, reparse, steps, notify_url, assemblies }: AssemblyReplayOptions, +): Promise { + if (steps) { + try { + const buf = await streamToBuffer(createReadStream(steps)) + const parsed: unknown = JSON.parse(buf.toString()) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid steps format: ${validated.error.message}`) + } + await apiCall(validated.data) + } catch (err) { + const error = ensureError(err) + output.error(error.message) + } + } else { + await apiCall() + } + + async function apiCall(stepsOverride?: Steps): Promise { + const promises = assemblies.map(async (assembly) => { + const [err] = await tryCatch( + client.replayAssembly(assembly, { + reparse_template: reparse ? 
1 : 0, + fields, + notify_url, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + steps: stepsOverride as ReplayAssemblyParams['steps'], + }), + ) + if (err) { + output.error(formatAPIError(err)) + } + }) + await Promise.all(promises) + } +} + +// --- From assemblies-create.ts: Helper classes and functions --- +interface NodeWatcher { + on(event: 'error', listener: (err: Error) => void): void + on(event: 'close', listener: () => void): void + on(event: 'change', listener: (evt: string, filename: string) => void): void + on(event: string, listener: (...args: unknown[]) => void): void + close(): void +} + +type NodeWatchFn = (path: string, options?: { recursive?: boolean }) => NodeWatcher + +let nodeWatch: NodeWatchFn | undefined + +async function getNodeWatch(): Promise { + if (!nodeWatch) { + const mod = (await import('node-watch')) as unknown as { default: NodeWatchFn } + nodeWatch = mod.default + } + return nodeWatch +} + +// workaround for determining mime-type of stdin +const stdinWithPath = process.stdin as unknown as { path: string } +stdinWithPath.path = '/dev/stdin' + +interface OutStream extends Writable { + path?: string + mtime?: Date +} + +interface Job { + in: Readable | null + out: OutStream | null +} + +type OutstreamProvider = (inpath: string | null, indir?: string) => Promise + +interface StreamRegistry { + [key: string]: OutStream | undefined +} + +interface JobEmitterOptions { + recursive?: boolean + outstreamProvider: OutstreamProvider + streamRegistry: StreamRegistry + watch?: boolean + reprocessStale?: boolean +} + +interface ReaddirJobEmitterOptions { + dir: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider + topdir?: string +} + +interface SingleJobEmitterOptions { + file: string + streamRegistry: StreamRegistry + outstreamProvider: OutstreamProvider +} + +interface WatchJobEmitterOptions { + file: string + streamRegistry: StreamRegistry + recursive?: boolean + 
outstreamProvider: OutstreamProvider +} + +interface StatLike { + isDirectory(): boolean +} + +const fstatAsync = promisify(fs.fstat) + +async function myStat( + stdioStream: NodeJS.ReadStream | NodeJS.WriteStream, + filepath: string, +): Promise { + if (filepath === '-') { + const stream = stdioStream as NodeJS.ReadStream & { fd: number } + return await fstatAsync(stream.fd) + } + return await fsp.stat(filepath) +} + +function dirProvider(output: string): OutstreamProvider { + return async (inpath, indir = process.cwd()) => { + if (inpath == null || inpath === '-') { + throw new Error('You must provide an input to output to a directory') + } + + let relpath = path.relative(indir, inpath) + relpath = relpath.replace(/^(\.\.\/)+/, '') + const outpath = path.join(output, relpath) + const outdir = path.dirname(outpath) + + await fsp.mkdir(outdir, { recursive: true }) + const [, stats] = await tryCatch(fsp.stat(outpath)) + const mtime = stats?.mtime ?? new Date(0) + const outstream = fs.createWriteStream(outpath) as OutStream + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + outstream.on('error', () => {}) + outstream.mtime = mtime + return outstream + } +} + +function fileProvider(output: string): OutstreamProvider { + const dirExistsP = fsp.mkdir(path.dirname(output), { recursive: true }) + return async (_inpath) => { + await dirExistsP + if (output === '-') return process.stdout as OutStream + + const [, stats] = await tryCatch(fsp.stat(output)) + const mtime = stats?.mtime ?? 
new Date(0) + const outstream = fs.createWriteStream(output) as OutStream + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + outstream.on('error', () => {}) + outstream.mtime = mtime + return outstream + } +} + +function nullProvider(): OutstreamProvider { + return async (_inpath) => null +} + +class MyEventEmitter extends EventEmitter { + protected hasEnded: boolean + + constructor() { + super() + this.hasEnded = false + } + + override emit(event: string | symbol, ...args: unknown[]): boolean { + if (this.hasEnded) return false + if (event === 'end' || event === 'error') { + this.hasEnded = true + return super.emit(event, ...args) + } + return super.emit(event, ...args) + } +} + +class ReaddirJobEmitter extends MyEventEmitter { + constructor({ + dir, + streamRegistry, + recursive, + outstreamProvider, + topdir = dir, + }: ReaddirJobEmitterOptions) { + super() + + process.nextTick(() => { + this.processDirectory({ dir, streamRegistry, recursive, outstreamProvider, topdir }).catch( + (err) => { + this.emit('error', err) + }, + ) + }) + } + + private async processDirectory({ + dir, + streamRegistry, + recursive, + outstreamProvider, + topdir, + }: ReaddirJobEmitterOptions & { topdir: string }): Promise { + const files = await fsp.readdir(dir) + + const pendingOperations: Promise[] = [] + + for (const filename of files) { + const file = path.normalize(path.join(dir, filename)) + pendingOperations.push( + this.processFile({ file, streamRegistry, recursive, outstreamProvider, topdir }), + ) + } + + await Promise.all(pendingOperations) + this.emit('end') + } + + private async processFile({ + file, + streamRegistry, + recursive = false, + outstreamProvider, + topdir, + }: { + file: string + streamRegistry: StreamRegistry + recursive?: boolean + outstreamProvider: OutstreamProvider + topdir: string + }): Promise { + const stats = await fsp.stat(file) + + if 
(stats.isDirectory()) { + if (recursive) { + await new Promise((resolve, reject) => { + const subdirEmitter = new ReaddirJobEmitter({ + dir: file, + streamRegistry, + recursive, + outstreamProvider, + topdir, + }) + subdirEmitter.on('job', (job: Job) => this.emit('job', job)) + subdirEmitter.on('error', (error: Error) => reject(error)) + subdirEmitter.on('end', () => resolve()) + }) + } + } else { + const existing = streamRegistry[file] + if (existing) existing.end() + const outstream = await outstreamProvider(file, topdir) + streamRegistry[file] = outstream ?? undefined + const instream = fs.createReadStream(file) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + this.emit('job', { in: instream, out: outstream }) + } + } +} + +class SingleJobEmitter extends MyEventEmitter { + constructor({ file, streamRegistry, outstreamProvider }: SingleJobEmitterOptions) { + super() + + const normalizedFile = path.normalize(file) + const existing = streamRegistry[normalizedFile] + if (existing) existing.end() + outstreamProvider(normalizedFile).then((outstream) => { + streamRegistry[normalizedFile] = outstream ?? 
undefined + + let instream: Readable | null + if (normalizedFile === '-') { + if (tty.isatty(process.stdin.fd)) { + instream = null + } else { + instream = process.stdin + } + } else { + instream = fs.createReadStream(normalizedFile) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + } + + process.nextTick(() => { + this.emit('job', { in: instream, out: outstream }) + this.emit('end') + }) + }) + } +} + +class InputlessJobEmitter extends MyEventEmitter { + constructor({ + outstreamProvider, + }: { streamRegistry: StreamRegistry; outstreamProvider: OutstreamProvider }) { + super() + + process.nextTick(() => { + outstreamProvider(null).then((outstream) => { + try { + this.emit('job', { in: null, out: outstream }) + } catch (err) { + this.emit('error', err) + } + + this.emit('end') + }) + }) + } +} + +class NullJobEmitter extends MyEventEmitter { + constructor() { + super() + process.nextTick(() => this.emit('end')) + } +} + +class WatchJobEmitter extends MyEventEmitter { + private watcher: NodeWatcher | null = null + + constructor({ file, streamRegistry, recursive, outstreamProvider }: WatchJobEmitterOptions) { + super() + + this.init({ file, streamRegistry, recursive, outstreamProvider }).catch((err) => { + this.emit('error', err) + }) + + // Clean up watcher on process exit signals + const cleanup = () => this.close() + process.once('SIGINT', cleanup) + process.once('SIGTERM', cleanup) + } + + /** Close the file watcher and release resources */ + close(): void { + if (this.watcher) { + this.watcher.close() + this.watcher = null + } + } + + private async init({ + file, + streamRegistry, + recursive, + outstreamProvider, + }: WatchJobEmitterOptions): Promise { + const stats = await fsp.stat(file) + const topdir = stats.isDirectory() ? 
file : undefined + + const watchFn = await getNodeWatch() + this.watcher = watchFn(file, { recursive }) + + this.watcher.on('error', (err: Error) => { + this.close() + this.emit('error', err) + }) + this.watcher.on('close', () => this.emit('end')) + this.watcher.on('change', (_evt: string, filename: string) => { + const normalizedFile = path.normalize(filename) + this.handleChange(normalizedFile, topdir, streamRegistry, outstreamProvider).catch((err) => { + this.emit('error', err) + }) + }) + } + + private async handleChange( + normalizedFile: string, + topdir: string | undefined, + streamRegistry: StreamRegistry, + outstreamProvider: OutstreamProvider, + ): Promise { + const stats = await fsp.stat(normalizedFile) + if (stats.isDirectory()) return + + const existing = streamRegistry[normalizedFile] + if (existing) existing.end() + + const outstream = await outstreamProvider(normalizedFile, topdir) + streamRegistry[normalizedFile] = outstream ?? undefined + + const instream = fs.createReadStream(normalizedFile) + // Attach a no-op error handler to prevent unhandled errors if stream is destroyed + // before being consumed (e.g., due to output collision detection) + instream.on('error', () => {}) + this.emit('job', { in: instream, out: outstream }) + } +} + +class MergedJobEmitter extends MyEventEmitter { + constructor(...jobEmitters: MyEventEmitter[]) { + super() + + let ncomplete = 0 + + for (const jobEmitter of jobEmitters) { + jobEmitter.on('error', (err: Error) => this.emit('error', err)) + jobEmitter.on('job', (job: Job) => this.emit('job', job)) + jobEmitter.on('end', () => { + if (++ncomplete === jobEmitters.length) this.emit('end') + }) + } + + if (jobEmitters.length === 0) { + this.emit('end') + } + } +} + +class ConcattedJobEmitter extends MyEventEmitter { + constructor(emitterFn: () => MyEventEmitter, ...emitterFns: (() => MyEventEmitter)[]) { + super() + + const emitter = emitterFn() + + emitter.on('error', (err: Error) => this.emit('error', err)) + 
emitter.on('job', (job: Job) => this.emit('job', job)) + + if (emitterFns.length === 0) { + emitter.on('end', () => this.emit('end')) + } else { + emitter.on('end', () => { + const firstFn = emitterFns[0] + if (!firstFn) { + this.emit('end') + return + } + const restEmitter = new ConcattedJobEmitter(firstFn, ...emitterFns.slice(1)) + restEmitter.on('error', (err: Error) => this.emit('error', err)) + restEmitter.on('job', (job: Job) => this.emit('job', job)) + restEmitter.on('end', () => this.emit('end')) + }) + } + } +} + +function detectConflicts(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + const outfileAssociations: Record = {} + + jobEmitter.on('end', () => emitter.emit('end')) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => { + if (job.in == null || job.out == null) { + emitter.emit('job', job) + return + } + const inPath = (job.in as fs.ReadStream).path as string + const outPath = job.out.path as string + if (Object.hasOwn(outfileAssociations, outPath) && outfileAssociations[outPath] !== inPath) { + emitter.emit( + 'error', + new Error(`Output collision between '${inPath}' and '${outfileAssociations[outPath]}'`), + ) + } else { + outfileAssociations[outPath] = inPath + emitter.emit('job', job) + } + }) + + return emitter +} + +function dismissStaleJobs(jobEmitter: EventEmitter): MyEventEmitter { + const emitter = new MyEventEmitter() + const pendingChecks: Promise[] = [] + + jobEmitter.on('end', () => Promise.all(pendingChecks).then(() => emitter.emit('end'))) + jobEmitter.on('error', (err: Error) => emitter.emit('error', err)) + jobEmitter.on('job', (job: Job) => { + if (job.in == null || job.out == null) { + emitter.emit('job', job) + return + } + + const inPath = (job.in as fs.ReadStream).path as string + const checkPromise = fsp + .stat(inPath) + .then((stats) => { + const inM = stats.mtime + const outM = job.out?.mtime ?? 
new Date(0) + + if (outM <= inM) emitter.emit('job', job) + }) + .catch(() => { + emitter.emit('job', job) + }) + pendingChecks.push(checkPromise) + }) + + return emitter +} + +function makeJobEmitter( + inputs: string[], + { + recursive, + outstreamProvider, + streamRegistry, + watch: watchOption, + reprocessStale, + }: JobEmitterOptions, +): MyEventEmitter { + const emitter = new EventEmitter() + + const emitterFns: (() => MyEventEmitter)[] = [] + const watcherFns: (() => MyEventEmitter)[] = [] + + async function processInputs(): Promise { + for (const input of inputs) { + if (input === '-') { + emitterFns.push( + () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), + ) + watcherFns.push(() => new NullJobEmitter()) + } else { + const stats = await fsp.stat(input) + if (stats.isDirectory()) { + emitterFns.push( + () => + new ReaddirJobEmitter({ dir: input, recursive, outstreamProvider, streamRegistry }), + ) + watcherFns.push( + () => + new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + ) + } else { + emitterFns.push( + () => new SingleJobEmitter({ file: input, outstreamProvider, streamRegistry }), + ) + watcherFns.push( + () => + new WatchJobEmitter({ file: input, recursive, outstreamProvider, streamRegistry }), + ) + } + } + } + + if (inputs.length === 0) { + emitterFns.push(() => new InputlessJobEmitter({ outstreamProvider, streamRegistry })) + } + + startEmitting() + } + + function startEmitting(): void { + let source: MyEventEmitter = new MergedJobEmitter(...emitterFns.map((f) => f())) + + if (watchOption) { + source = new ConcattedJobEmitter( + () => source, + () => new MergedJobEmitter(...watcherFns.map((f) => f())), + ) + } + + source.on('job', (job: Job) => emitter.emit('job', job)) + source.on('error', (err: Error) => emitter.emit('error', err)) + source.on('end', () => emitter.emit('end')) + } + + processInputs().catch((err) => { + emitter.emit('error', err) + }) + + const stalefilter = 
reprocessStale ? (x: EventEmitter) => x as MyEventEmitter : dismissStaleJobs + return stalefilter(detectConflicts(emitter)) +} + +export interface AssembliesCreateOptions { + steps?: string + template?: string + fields?: Record + watch?: boolean + recursive?: boolean + inputs: string[] + output?: string | null + del?: boolean + reprocessStale?: boolean + singleAssembly?: boolean + concurrency?: number +} + +const DEFAULT_CONCURRENCY = 5 + +// --- Main assembly create function --- +export async function create( + outputctl: IOutputCtl, + client: Transloadit, + { + steps, + template, + fields, + watch: watchOption, + recursive, + inputs, + output, + del, + reprocessStale, + singleAssembly, + concurrency = DEFAULT_CONCURRENCY, + }: AssembliesCreateOptions, +): Promise<{ results: unknown[]; hasFailures: boolean }> { + // Quick fix for https://github.com/transloadit/transloadify/issues/13 + // Only default to stdout when output is undefined (not provided), not when explicitly null + let resolvedOutput = output + if (resolvedOutput === undefined && !process.stdout.isTTY) resolvedOutput = '-' + + // Read steps file async before entering the Promise constructor + // We use StepsInput (the input type) rather than Steps (the transformed output type) + // to avoid zod adding default values that the API may reject + let stepsData: StepsInput | undefined + if (steps) { + const stepsContent = await fsp.readFile(steps, 'utf8') + const parsed: unknown = JSON.parse(stepsContent) + // Basic structural validation: must be an object with step names as keys + if (parsed == null || typeof parsed !== 'object' || Array.isArray(parsed)) { + throw new Error('Invalid steps format: expected an object with step names as keys') + } + // Validate each step has a robot field + for (const [stepName, step] of Object.entries(parsed)) { + if (step == null || typeof step !== 'object' || Array.isArray(step)) { + throw new Error(`Invalid steps format: step '${stepName}' must be an object`) + } + if 
(!('robot' in step) || typeof (step as Record).robot !== 'string') { + throw new Error( + `Invalid steps format: step '${stepName}' must have a 'robot' string property`, + ) + } + } + stepsData = parsed as StepsInput + } + + // Determine output stat async before entering the Promise constructor + let outstat: StatLike | undefined + if (resolvedOutput != null) { + const [err, stat] = await tryCatch(myStat(process.stdout, resolvedOutput)) + if (err && (!isErrnoException(err) || err.code !== 'ENOENT')) throw err + outstat = stat ?? { isDirectory: () => false } + + if (!outstat.isDirectory() && inputs.length !== 0) { + const firstInput = inputs[0] + if (firstInput) { + const firstInputStat = await myStat(process.stdin, firstInput) + if (inputs.length > 1 || firstInputStat.isDirectory()) { + const msg = 'Output must be a directory when specifying multiple inputs' + outputctl.error(msg) + throw new Error(msg) + } + } + } + } + + return new Promise((resolve, reject) => { + const params: CreateAssemblyParams = ( + stepsData ? { steps: stepsData as CreateAssemblyParams['steps'] } : { template_id: template } + ) as CreateAssemblyParams + if (fields) { + params.fields = fields + } + + const outstreamProvider: OutstreamProvider = + resolvedOutput == null + ? nullProvider() + : outstat?.isDirectory() + ? 
dirProvider(resolvedOutput) + : fileProvider(resolvedOutput) + const streamRegistry: StreamRegistry = {} + + const emitter = makeJobEmitter(inputs, { + recursive, + watch: watchOption, + outstreamProvider, + streamRegistry, + reprocessStale, + }) + + // Use p-queue for concurrency management + const queue = new PQueue({ concurrency }) + const results: unknown[] = [] + let hasFailures = false + // AbortController to cancel all in-flight createAssembly calls when an error occurs + const abortController = new AbortController() + + // Helper to process a single assembly job + async function processAssemblyJob( + inPath: string | null, + outPath: string | null, + outMtime: Date | undefined, + ): Promise { + outputctl.debug(`PROCESSING JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + // Create fresh streams for this job + const inStream = inPath ? fs.createReadStream(inPath) : null + inStream?.on('error', () => {}) + const outStream = outPath ? (fs.createWriteStream(outPath) as OutStream) : null + outStream?.on('error', () => {}) + if (outStream) outStream.mtime = outMtime + + let superceded = false + if (outStream != null) { + outStream.on('finish', () => { + superceded = true + }) + } + + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (inStream != null) { + createOptions.uploads = { in: inStream } + } + + const result = await client.createAssembly(createOptions) + if (superceded) return undefined + + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const assembly = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onPoll: () => { + if (superceded) return false + return true + }, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) + + if (superceded) return undefined + + if (assembly.error || (assembly.ok && assembly.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: 
${assembly.error || assembly.message} (Status: ${assembly.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + if (!assembly.results) throw new Error('No results in assembly') + const resultsKeys = Object.keys(assembly.results) + const firstKey = resultsKeys[0] + if (!firstKey) throw new Error('No results in assembly') + const firstResult = assembly.results[firstKey] + if (!firstResult || !firstResult[0]) throw new Error('No results in assembly') + const resulturl = firstResult[0].url + + if (outStream != null && resulturl && !superceded) { + outputctl.debug('DOWNLOADING') + const [dlErr] = await tryCatch( + pipeline(got.stream(resulturl, { signal: abortController.signal }), outStream), + ) + if (dlErr) { + if (dlErr.name !== 'AbortError') { + outputctl.error(dlErr.message) + throw dlErr + } + } + } + + outputctl.debug(`COMPLETED ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + if (del && inPath) { + await fsp.unlink(inPath) + } + return assembly + } + + if (singleAssembly) { + // Single-assembly mode: collect file paths, then create one assembly with all inputs + // We close streams immediately to avoid exhausting file descriptors with many files + const collectedPaths: string[] = [] + + emitter.on('job', (job: Job) => { + if (job.in != null) { + const inPath = (job.in as fs.ReadStream).path as string + outputctl.debug(`COLLECTING JOB ${inPath}`) + collectedPaths.push(inPath) + // Close the stream immediately to avoid file descriptor exhaustion + ;(job.in as fs.ReadStream).destroy() + outputctl.debug(`STREAM CLOSED ${inPath}`) + } + }) + + emitter.on('error', (err: Error) => { + abortController.abort() + queue.clear() + outputctl.error(err) + reject(err) + }) + + emitter.on('end', async () => { + if (collectedPaths.length === 0) { + resolve({ results: [], hasFailures: false }) + return + } + + // Build uploads object, creating fresh streams for each file + const uploads: Record = {} + const inputPaths: string[] = [] + for (const inPath of collectedPaths) { + 
const basename = path.basename(inPath) + let key = basename + let counter = 1 + while (key in uploads) { + key = `${path.parse(basename).name}_${counter}${path.parse(basename).ext}` + counter++ + } + uploads[key] = fs.createReadStream(inPath) + inputPaths.push(inPath) + } + + outputctl.debug(`Creating single assembly with ${Object.keys(uploads).length} files`) + + try { + const assembly = await queue.add(async () => { + const createOptions: CreateAssemblyOptions = { + params, + signal: abortController.signal, + } + if (Object.keys(uploads).length > 0) { + createOptions.uploads = uploads + } + + const result = await client.createAssembly(createOptions) + const assemblyId = result.assembly_id + if (!assemblyId) throw new Error('No assembly_id in result') + + const asm = await client.awaitAssemblyCompletion(assemblyId, { + signal: abortController.signal, + onAssemblyProgress: (status) => { + outputctl.debug(`Assembly status: ${status.ok}`) + }, + }) + + if (asm.error || (asm.ok && asm.ok !== 'ASSEMBLY_COMPLETED')) { + const msg = `Assembly failed: ${asm.error || asm.message} (Status: ${asm.ok})` + outputctl.error(msg) + throw new Error(msg) + } + + // Download all results + if (asm.results && resolvedOutput != null) { + for (const [stepName, stepResults] of Object.entries(asm.results)) { + for (const stepResult of stepResults) { + const resultUrl = stepResult.url + if (!resultUrl) continue + + let outPath: string + if (outstat?.isDirectory()) { + outPath = path.join(resolvedOutput, stepResult.name || `${stepName}_result`) + } else { + outPath = resolvedOutput + } + + outputctl.debug(`DOWNLOADING ${stepResult.name} to ${outPath}`) + const [dlErr] = await tryCatch( + pipeline( + got.stream(resultUrl, { signal: abortController.signal }), + fs.createWriteStream(outPath), + ), + ) + if (dlErr) { + if (dlErr.name === 'AbortError') continue + outputctl.error(dlErr.message) + throw dlErr + } + } + } + } + + // Delete input files if requested + if (del) { + for (const inPath 
of inputPaths) { + await fsp.unlink(inPath) + } + } + return asm + }) + results.push(assembly) + } catch (err) { + hasFailures = true + outputctl.error(err as Error) + } + + resolve({ results, hasFailures }) + }) + } else { + // Default mode: one assembly per file with p-queue concurrency limiting + emitter.on('job', (job: Job) => { + const inPath = job.in + ? (((job.in as fs.ReadStream).path as string | undefined) ?? null) + : null + const outPath = job.out?.path ?? null + const outMtime = job.out?.mtime + outputctl.debug(`GOT JOB ${inPath ?? 'null'} ${outPath ?? 'null'}`) + + // Close the original streams immediately - we'll create fresh ones when processing + if (job.in != null) { + ;(job.in as fs.ReadStream).destroy() + } + if (job.out != null) { + job.out.destroy() + } + + // Add job to queue - p-queue handles concurrency automatically + queue + .add(async () => { + const result = await processAssemblyJob(inPath, outPath, outMtime) + if (result !== undefined) { + results.push(result) + } + }) + .catch((err: unknown) => { + hasFailures = true + outputctl.error(err as Error) + }) + }) + + emitter.on('error', (err: Error) => { + abortController.abort() + queue.clear() + outputctl.error(err) + reject(err) + }) + + emitter.on('end', async () => { + // Wait for all queued jobs to complete + await queue.onIdle() + resolve({ results, hasFailures }) + }) + } + }) +} + +// --- Command classes --- export class AssembliesCreateCommand extends AuthenticatedCommand { static override paths = [ ['assemblies', 'create'], @@ -119,7 +1189,7 @@ export class AssembliesCreateCommand extends AuthenticatedCommand { return 1 } - const { hasFailures } = await assembliesCreate(this.output, this.client, { + const { hasFailures } = await create(this.output, this.client, { steps: this.steps, template: this.template, fields: fieldsMap, @@ -173,7 +1243,7 @@ export class AssembliesListCommand extends AuthenticatedCommand { const keywordList = this.keywords ? 
this.keywords.split(',') : undefined const fieldList = this.fields ? this.fields.split(',') : undefined - await assemblies.list(this.output, this.client, { + await list(this.output, this.client, { before: this.before, after: this.after, keywords: keywordList, @@ -200,7 +1270,7 @@ export class AssembliesGetCommand extends AuthenticatedCommand { assemblyIds = Option.Rest({ required: 1 }) protected async run(): Promise { - await assemblies.get(this.output, this.client, { + await get(this.output, this.client, { assemblies: this.assemblyIds, }) return undefined @@ -226,7 +1296,7 @@ export class AssembliesDeleteCommand extends AuthenticatedCommand { assemblyIds = Option.Rest({ required: 1 }) protected async run(): Promise { - await assemblies.delete(this.output, this.client, { + await deleteAssemblies(this.output, this.client, { assemblies: this.assemblyIds, }) return undefined @@ -292,7 +1362,7 @@ export class AssembliesReplayCommand extends AuthenticatedCommand { fieldsMap[key] = value } - await assemblies.replay(this.output, this.client, { + await replay(this.output, this.client, { fields: fieldsMap, reparse: this.reparseTemplate, steps: this.steps, diff --git a/src/cli/commands/bills.ts b/src/cli/commands/bills.ts index 821b3be8..03d0a998 100644 --- a/src/cli/commands/bills.ts +++ b/src/cli/commands/bills.ts @@ -1,7 +1,46 @@ import { Command, Option } from 'clipanion' -import * as bills from '../bills.ts' +import { z } from 'zod' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Transloadit } from '../../Transloadit.ts' +import { formatAPIError } from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' import { AuthenticatedCommand } from './BaseCommand.ts' +// --- Types and business logic --- + +export interface BillsGetOptions { + months: string[] +} + +const BillResponseSchema = z.object({ + total: z.number(), +}) + +export async function get( + output: IOutputCtl, + client: Transloadit, + { months }: BillsGetOptions, +): Promise 
{ + const requests = months.map((month) => client.getBill(month)) + + const [err, results] = await tryCatch(Promise.all(requests)) + if (err) { + output.error(formatAPIError(err)) + return + } + + for (const result of results) { + const parsed = BillResponseSchema.safeParse(result) + if (parsed.success) { + output.print(`$${parsed.data.total}`, result) + } else { + output.print('Unable to parse bill response', result) + } + } +} + +// --- Command class --- + export class BillsGetCommand extends AuthenticatedCommand { static override paths = [ ['bills', 'get'], @@ -44,7 +83,7 @@ export class BillsGetCommand extends AuthenticatedCommand { monthList.push(`${d.getUTCFullYear()}-${d.getUTCMonth() + 1}`) } - await bills.get(this.output, this.client, { + await get(this.output, this.client, { months: monthList, }) return undefined diff --git a/src/cli/commands/notifications.ts b/src/cli/commands/notifications.ts index 32647d76..e31b4edc 100644 --- a/src/cli/commands/notifications.ts +++ b/src/cli/commands/notifications.ts @@ -1,7 +1,31 @@ import { Command, Option } from 'clipanion' -import * as notifications from '../notifications.ts' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Transloadit } from '../../Transloadit.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import { ensureError } from '../types.ts' import { AuthenticatedCommand } from './BaseCommand.ts' +// --- Types and business logic --- + +export interface NotificationsReplayOptions { + notify_url?: string + assemblies: string[] +} + +export async function replay( + output: IOutputCtl, + client: Transloadit, + { notify_url, assemblies }: NotificationsReplayOptions, +): Promise { + const promises = assemblies.map((id) => client.replayAssemblyNotification(id, { notify_url })) + const [err] = await tryCatch(Promise.all(promises)) + if (err) { + output.error(ensureError(err).message) + } +} + +// --- Command class --- + export class NotificationsReplayCommand extends 
AuthenticatedCommand { static override paths = [ ['assembly-notifications', 'replay'], @@ -30,7 +54,7 @@ export class NotificationsReplayCommand extends AuthenticatedCommand { assemblyIds = Option.Rest({ required: 1 }) protected async run(): Promise { - await notifications.replay(this.output, this.client, { + await replay(this.output, this.client, { notify_url: this.notifyUrl, assemblies: this.assemblyIds, }) diff --git a/src/cli/commands/templates.ts b/src/cli/commands/templates.ts index 5be53de3..f0649687 100644 --- a/src/cli/commands/templates.ts +++ b/src/cli/commands/templates.ts @@ -1,7 +1,337 @@ +import fsp from 'node:fs/promises' +import path from 'node:path' +import { promisify } from 'node:util' import { Command, Option } from 'clipanion' -import * as templates from '../templates.ts' +import rreaddir from 'recursive-readdir' +import { z } from 'zod' +import { tryCatch } from '../../alphalib/tryCatch.ts' +import type { Steps } from '../../alphalib/types/template.ts' +import { stepsSchema } from '../../alphalib/types/template.ts' +import type { TemplateContent } from '../../apiTypes.ts' +import type { Transloadit } from '../../Transloadit.ts' +import { createReadStream, formatAPIError, streamToBuffer } from '../helpers.ts' +import type { IOutputCtl } from '../OutputCtl.ts' +import ModifiedLookup from '../template-last-modified.ts' +import type { TemplateFile } from '../types.ts' +import { ensureError, isTransloaditAPIError, TemplateFileDataSchema } from '../types.ts' import { AuthenticatedCommand } from './BaseCommand.ts' +const rreaddirAsync = promisify(rreaddir) + +export interface TemplateCreateOptions { + name: string + file: string +} + +export interface TemplateGetOptions { + templates: string[] +} + +export interface TemplateModifyOptions { + template: string + name?: string + file: string +} + +export interface TemplateDeleteOptions { + templates: string[] +} + +export interface TemplateListOptions { + before?: string + after?: string + order?: 
'asc' | 'desc' + sort?: string + fields?: string[] +} + +export interface TemplateSyncOptions { + files: string[] + recursive?: boolean +} + +export async function create( + output: IOutputCtl, + client: Transloadit, + { name, file }: TemplateCreateOptions, +): Promise { + try { + const buf = await streamToBuffer(createReadStream(file)) + + const parsed: unknown = JSON.parse(buf.toString()) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid template steps format: ${validated.error.message}`) + } + + const result = await client.createTemplate({ + name, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + template: { steps: validated.data } as TemplateContent, + }) + output.print(result.id, result) + return result + } catch (err) { + const error = ensureError(err) + output.error(error.message) + throw err + } +} + +export async function get( + output: IOutputCtl, + client: Transloadit, + { templates }: TemplateGetOptions, +): Promise { + const requests = templates.map((template) => client.getTemplate(template)) + + const [err, results] = await tryCatch(Promise.all(requests)) + if (err) { + output.error(formatAPIError(err)) + throw err + } + + for (const result of results) { + output.print(result, result) + } +} + +export async function modify( + output: IOutputCtl, + client: Transloadit, + { template, name, file }: TemplateModifyOptions, +): Promise { + try { + const buf = await streamToBuffer(createReadStream(file)) + + let steps: Steps | null = null + let newName = name + + if (buf.length > 0) { + const parsed: unknown = JSON.parse(buf.toString()) + const validated = stepsSchema.safeParse(parsed) + if (!validated.success) { + throw new Error(`Invalid template steps format: ${validated.error.message}`) + } + steps = validated.data + } + + if (!name || buf.length === 0) { + const tpl = await client.getTemplate(template) + if (!name) newName = tpl.name + if (buf.length === 0 && 
tpl.content.steps) { + steps = tpl.content.steps + } + } + + if (steps === null) { + throw new Error('No steps to update template with') + } + + await client.editTemplate(template, { + name: newName, + // Steps (validated) is assignable to StepsInput at runtime; cast for TS + template: { steps } as TemplateContent, + }) + } catch (err) { + output.error(formatAPIError(err)) + throw err + } +} + +async function deleteTemplates( + output: IOutputCtl, + client: Transloadit, + { templates }: TemplateDeleteOptions, +): Promise { + await Promise.all( + templates.map(async (template) => { + const [err] = await tryCatch(client.deleteTemplate(template)) + if (err) { + output.error(formatAPIError(err)) + throw err + } + }), + ) +} + +// Export with `delete` alias for external consumers +export { deleteTemplates as delete } + +const TemplateIdSchema = z.object({ + id: z.string(), +}) + +export function list( + output: IOutputCtl, + client: Transloadit, + { before, after, order, sort, fields }: TemplateListOptions, +): void { + const stream = client.streamTemplates({ + todate: before, + fromdate: after, + order, + sort: sort as 'id' | 'name' | 'created' | 'modified' | undefined, + }) + + stream.on('readable', () => { + const template: unknown = stream.read() + if (template == null) return + + const parsed = TemplateIdSchema.safeParse(template) + if (!parsed.success) return + + if (fields == null) { + output.print(parsed.data.id, template) + } else { + const templateRecord = template as Record + output.print(fields.map((field) => templateRecord[field]).join(' '), template) + } + }) + + stream.on('error', (err: unknown) => { + output.error(formatAPIError(err)) + }) +} + +export async function sync( + output: IOutputCtl, + client: Transloadit, + { files, recursive }: TemplateSyncOptions, +): Promise { + // Promise [String] -- all files in the directory tree + const relevantFilesNested = await Promise.all( + files.map(async (file) => { + const stats = await fsp.stat(file) + if 
(!stats.isDirectory()) return [file] + + let children: string[] + if (recursive) { + children = (await rreaddirAsync(file)) as string[] + } else { + const list = await fsp.readdir(file) + children = list.map((child) => path.join(file, child)) + } + + if (recursive) return children + + // Filter directories if not recursive + const filtered = await Promise.all( + children.map(async (child) => { + const childStats = await fsp.stat(child) + return childStats.isDirectory() ? null : child + }), + ) + return filtered.filter((f): f is string => f !== null) + }), + ) + const relevantFiles = relevantFilesNested.flat() + + // Promise [{ file: String, data: JSON }] -- all templates + const maybeFiles = await Promise.all(relevantFiles.map(templateFileOrNull)) + const templates = maybeFiles.filter((maybeFile): maybeFile is TemplateFile => maybeFile !== null) + + async function templateFileOrNull(file: string): Promise { + if (path.extname(file) !== '.json') return null + + try { + const data = await fsp.readFile(file, 'utf8') + const parsed: unknown = JSON.parse(data) + const validated = TemplateFileDataSchema.safeParse(parsed) + if (!validated.success) return null + return 'transloadit_template_id' in validated.data ? 
{ file, data: validated.data } : null + } catch (e) { + if (e instanceof SyntaxError) return null + throw e + } + } + + const modified = new ModifiedLookup(client) + + const [err] = await tryCatch( + Promise.all( + templates.map(async (template) => { + if (!('steps' in template.data)) { + if (!template.data.transloadit_template_id) { + throw new Error(`Template file has no id and no steps: ${template.file}`) + } + return download(template) + } + + if (!template.data.transloadit_template_id) return upload(template) + + const stats = await fsp.stat(template.file) + const fileModified = stats.mtime + + let templateModified: Date + const templateId = template.data.transloadit_template_id + try { + await client.getTemplate(templateId) + templateModified = await new Promise((resolve, reject) => + modified.byId(templateId, (err, res) => { + if (err) { + reject(err) + } else if (res) { + resolve(res) + } else { + reject(new Error('No date returned')) + } + }), + ) + } catch (err) { + if (isTransloaditAPIError(err)) { + if (err.code === 'SERVER_404' || (err.response && err.response.statusCode === 404)) { + throw new Error(`Template file references nonexistent template: ${template.file}`) + } + } + throw err + } + + if (fileModified > templateModified) return upload(template) + return download(template) + }), + ), + ) + if (err) { + output.error(err) + throw err + } + + async function upload(template: TemplateFile): Promise { + const params = { + name: path.basename(template.file, '.json'), + template: { steps: template.data.steps } as TemplateContent, + } + + if (!template.data.transloadit_template_id) { + const result = await client.createTemplate(params) + template.data.transloadit_template_id = result.id + await fsp.writeFile(template.file, JSON.stringify(template.data)) + return + } + + await client.editTemplate(template.data.transloadit_template_id, params) + } + + async function download(template: TemplateFile): Promise { + const templateId = 
template.data.transloadit_template_id + if (!templateId) { + throw new Error('Cannot download template without id') + } + + const result = await client.getTemplate(templateId) + + // Use empty object if template has no steps (undefined would be stripped by JSON.stringify) + template.data.steps = result.content.steps ?? {} + const file = path.join(path.dirname(template.file), `${result.name}.json`) + + await fsp.writeFile(template.file, JSON.stringify(template.data)) + + if (file !== template.file) { + await fsp.rename(template.file, file) + } + } +} export class TemplatesCreateCommand extends AuthenticatedCommand { static override paths = [ ['templates', 'create'], @@ -27,7 +357,7 @@ export class TemplatesCreateCommand extends AuthenticatedCommand { file = Option.String({ required: false }) protected async run(): Promise { - await templates.create(this.output, this.client, { + await create(this.output, this.client, { name: this.name, file: this.file ?? '-', }) @@ -52,7 +382,7 @@ export class TemplatesGetCommand extends AuthenticatedCommand { templateIds = Option.Rest({ required: 1 }) protected async run(): Promise { - await templates.get(this.output, this.client, { + await get(this.output, this.client, { templates: this.templateIds, }) return undefined @@ -90,7 +420,7 @@ export class TemplatesModifyCommand extends AuthenticatedCommand { file = Option.String({ required: false }) protected async run(): Promise { - await templates.modify(this.output, this.client, { + await modify(this.output, this.client, { template: this.templateId, name: this.newName, file: this.file ?? 
'-', @@ -116,7 +446,7 @@ export class TemplatesDeleteCommand extends AuthenticatedCommand { templateIds = Option.Rest({ required: 1 }) protected async run(): Promise { - await templates.delete(this.output, this.client, { + await deleteTemplates(this.output, this.client, { templates: this.templateIds, }) return undefined @@ -173,7 +503,7 @@ export class TemplatesListCommand extends AuthenticatedCommand { const fieldList = this.fields ? this.fields.split(',') : undefined - await templates.list(this.output, this.client, { + await list(this.output, this.client, { after: this.after, before: this.before, sort: this.sort, @@ -217,7 +547,7 @@ export class TemplatesSyncCommand extends AuthenticatedCommand { files = Option.Rest() protected async run(): Promise { - await templates.sync(this.output, this.client, { + await sync(this.output, this.client, { recursive: this.recursive, files: this.files, }) diff --git a/src/cli/notifications.ts b/src/cli/notifications.ts deleted file mode 100644 index 8a32bac6..00000000 --- a/src/cli/notifications.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { tryCatch } from '../alphalib/tryCatch.ts' -import type { Transloadit } from '../Transloadit.ts' -import type { IOutputCtl } from './OutputCtl.ts' -import { ensureError } from './types.ts' - -export interface NotificationsReplayOptions { - notify_url?: string - assemblies: string[] -} - -export async function replay( - output: IOutputCtl, - client: Transloadit, - { notify_url, assemblies }: NotificationsReplayOptions, -): Promise { - const promises = assemblies.map((id) => client.replayAssemblyNotification(id, { notify_url })) - const [err] = await tryCatch(Promise.all(promises)) - if (err) { - output.error(ensureError(err).message) - } -} diff --git a/src/cli/templates.ts b/src/cli/templates.ts deleted file mode 100644 index 6a15db92..00000000 --- a/src/cli/templates.ts +++ /dev/null @@ -1,330 +0,0 @@ -import fsp from 'node:fs/promises' -import path from 'node:path' -import { promisify } from 
'node:util' -import rreaddir from 'recursive-readdir' -import { z } from 'zod' -import { tryCatch } from '../alphalib/tryCatch.ts' -import type { Steps } from '../alphalib/types/template.ts' -import { stepsSchema } from '../alphalib/types/template.ts' -import type { TemplateContent } from '../apiTypes.ts' -import type { Transloadit } from '../Transloadit.ts' -import { createReadStream, formatAPIError, streamToBuffer } from './helpers.ts' -import type { IOutputCtl } from './OutputCtl.ts' -import ModifiedLookup from './template-last-modified.ts' -import type { TemplateFile } from './types.ts' -import { ensureError, isTransloaditAPIError, TemplateFileDataSchema } from './types.ts' - -const rreaddirAsync = promisify(rreaddir) - -export interface TemplateCreateOptions { - name: string - file: string -} - -export interface TemplateGetOptions { - templates: string[] -} - -export interface TemplateModifyOptions { - template: string - name?: string - file: string -} - -export interface TemplateDeleteOptions { - templates: string[] -} - -export interface TemplateListOptions { - before?: string - after?: string - order?: 'asc' | 'desc' - sort?: string - fields?: string[] -} - -export interface TemplateSyncOptions { - files: string[] - recursive?: boolean -} - -export async function create( - output: IOutputCtl, - client: Transloadit, - { name, file }: TemplateCreateOptions, -): Promise { - try { - const buf = await streamToBuffer(createReadStream(file)) - - const parsed: unknown = JSON.parse(buf.toString()) - const validated = stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid template steps format: ${validated.error.message}`) - } - - const result = await client.createTemplate({ - name, - // Steps (validated) is assignable to StepsInput at runtime; cast for TS - template: { steps: validated.data } as TemplateContent, - }) - output.print(result.id, result) - return result - } catch (err) { - const error = ensureError(err) - 
output.error(error.message) - throw err - } -} - -export async function get( - output: IOutputCtl, - client: Transloadit, - { templates }: TemplateGetOptions, -): Promise { - const requests = templates.map((template) => client.getTemplate(template)) - - const [err, results] = await tryCatch(Promise.all(requests)) - if (err) { - output.error(formatAPIError(err)) - throw err - } - - for (const result of results) { - output.print(result, result) - } -} - -export async function modify( - output: IOutputCtl, - client: Transloadit, - { template, name, file }: TemplateModifyOptions, -): Promise { - try { - const buf = await streamToBuffer(createReadStream(file)) - - let steps: Steps | null = null - let newName = name - - if (buf.length > 0) { - const parsed: unknown = JSON.parse(buf.toString()) - const validated = stepsSchema.safeParse(parsed) - if (!validated.success) { - throw new Error(`Invalid template steps format: ${validated.error.message}`) - } - steps = validated.data - } - - if (!name || buf.length === 0) { - const tpl = await client.getTemplate(template) - if (!name) newName = tpl.name - if (buf.length === 0 && tpl.content.steps) { - steps = tpl.content.steps - } - } - - if (steps === null) { - throw new Error('No steps to update template with') - } - - await client.editTemplate(template, { - name: newName, - // Steps (validated) is assignable to StepsInput at runtime; cast for TS - template: { steps } as TemplateContent, - }) - } catch (err) { - output.error(formatAPIError(err)) - throw err - } -} - -async function _delete( - output: IOutputCtl, - client: Transloadit, - { templates }: TemplateDeleteOptions, -): Promise { - await Promise.all( - templates.map(async (template) => { - const [err] = await tryCatch(client.deleteTemplate(template)) - if (err) { - output.error(formatAPIError(err)) - throw err - } - }), - ) -} -export { _delete as delete } - -const TemplateIdSchema = z.object({ - id: z.string(), -}) - -export function list( - output: IOutputCtl, - 
client: Transloadit, - { before, after, order, sort, fields }: TemplateListOptions, -): void { - const stream = client.streamTemplates({ - todate: before, - fromdate: after, - order, - sort: sort as 'id' | 'name' | 'created' | 'modified' | undefined, - }) - - stream.on('readable', () => { - const template: unknown = stream.read() - if (template == null) return - - const parsed = TemplateIdSchema.safeParse(template) - if (!parsed.success) return - - if (fields == null) { - output.print(parsed.data.id, template) - } else { - const templateRecord = template as Record - output.print(fields.map((field) => templateRecord[field]).join(' '), template) - } - }) - - stream.on('error', (err: unknown) => { - output.error(formatAPIError(err)) - }) -} - -export async function sync( - output: IOutputCtl, - client: Transloadit, - { files, recursive }: TemplateSyncOptions, -): Promise { - // Promise [String] -- all files in the directory tree - const relevantFilesNested = await Promise.all( - files.map(async (file) => { - const stats = await fsp.stat(file) - if (!stats.isDirectory()) return [file] - - let children: string[] - if (recursive) { - children = (await rreaddirAsync(file)) as string[] - } else { - const list = await fsp.readdir(file) - children = list.map((child) => path.join(file, child)) - } - - if (recursive) return children - - // Filter directories if not recursive - const filtered = await Promise.all( - children.map(async (child) => { - const childStats = await fsp.stat(child) - return childStats.isDirectory() ? 
null : child - }), - ) - return filtered.filter((f): f is string => f !== null) - }), - ) - const relevantFiles = relevantFilesNested.flat() - - // Promise [{ file: String, data: JSON }] -- all templates - const maybeFiles = await Promise.all(relevantFiles.map(templateFileOrNull)) - const templates = maybeFiles.filter((maybeFile): maybeFile is TemplateFile => maybeFile !== null) - - async function templateFileOrNull(file: string): Promise { - if (path.extname(file) !== '.json') return null - - try { - const data = await fsp.readFile(file, 'utf8') - const parsed: unknown = JSON.parse(data) - const validated = TemplateFileDataSchema.safeParse(parsed) - if (!validated.success) return null - return 'transloadit_template_id' in validated.data ? { file, data: validated.data } : null - } catch (e) { - if (e instanceof SyntaxError) return null - throw e - } - } - - const modified = new ModifiedLookup(client) - - const [err] = await tryCatch( - Promise.all( - templates.map(async (template) => { - if (!('steps' in template.data)) { - if (!template.data.transloadit_template_id) { - throw new Error(`Template file has no id and no steps: ${template.file}`) - } - return download(template) - } - - if (!template.data.transloadit_template_id) return upload(template) - - const stats = await fsp.stat(template.file) - const fileModified = stats.mtime - - let templateModified: Date - const templateId = template.data.transloadit_template_id - try { - await client.getTemplate(templateId) - templateModified = await new Promise((resolve, reject) => - modified.byId(templateId, (err, res) => { - if (err) { - reject(err) - } else if (res) { - resolve(res) - } else { - reject(new Error('No date returned')) - } - }), - ) - } catch (err) { - if (isTransloaditAPIError(err)) { - if (err.code === 'SERVER_404' || (err.response && err.response.statusCode === 404)) { - throw new Error(`Template file references nonexistent template: ${template.file}`) - } - } - throw err - } - - if (fileModified > 
templateModified) return upload(template) - return download(template) - }), - ), - ) - if (err) { - output.error(err) - throw err - } - - async function upload(template: TemplateFile): Promise { - const params = { - name: path.basename(template.file, '.json'), - template: { steps: template.data.steps } as TemplateContent, - } - - if (!template.data.transloadit_template_id) { - const result = await client.createTemplate(params) - template.data.transloadit_template_id = result.id - await fsp.writeFile(template.file, JSON.stringify(template.data)) - return - } - - await client.editTemplate(template.data.transloadit_template_id, params) - } - - async function download(template: TemplateFile): Promise { - const templateId = template.data.transloadit_template_id - if (!templateId) { - throw new Error('Cannot download template without id') - } - - const result = await client.getTemplate(templateId) - - // Use empty object if template has no steps (undefined would be stripped by JSON.stringify) - template.data.steps = result.content.steps ?? 
{} - const file = path.join(path.dirname(template.file), `${result.name}.json`) - - await fsp.writeFile(template.file, JSON.stringify(template.data)) - - if (file !== template.file) { - await fsp.rename(template.file, file) - } - } -} diff --git a/test/e2e/cli/assemblies-create.test.ts b/test/e2e/cli/assemblies-create.test.ts index a8681bab..a675d1f7 100644 --- a/test/e2e/cli/assemblies-create.test.ts +++ b/test/e2e/cli/assemblies-create.test.ts @@ -5,7 +5,7 @@ import { promisify } from 'node:util' import { imageSize } from 'image-size' import rreaddir from 'recursive-readdir' import { describe, expect, it } from 'vitest' -import assembliesCreate from '../../../src/cli/assemblies-create.ts' +import { create as assembliesCreate } from '../../../src/cli/commands/assemblies.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } from './test-utils.ts' diff --git a/test/e2e/cli/assemblies-list.test.ts b/test/e2e/cli/assemblies-list.test.ts index 1d58140b..48391596 100644 --- a/test/e2e/cli/assemblies-list.test.ts +++ b/test/e2e/cli/assemblies-list.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from 'vitest' -import * as assemblies from '../../../src/cli/assemblies.ts' +import * as assemblies from '../../../src/cli/commands/assemblies.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } from './test-utils.ts' diff --git a/test/e2e/cli/assemblies.test.ts b/test/e2e/cli/assemblies.test.ts index b769f63b..0264f2ba 100644 --- a/test/e2e/cli/assemblies.test.ts +++ b/test/e2e/cli/assemblies.test.ts @@ -1,6 +1,6 @@ import fsp from 'node:fs/promises' import { describe, expect, it } from 'vitest' -import * as assemblies from '../../../src/cli/assemblies.ts' +import * as assemblies from '../../../src/cli/commands/assemblies.ts' import { zip } from '../../../src/cli/helpers.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from 
'./test-utils.ts' diff --git a/test/e2e/cli/bills.test.ts b/test/e2e/cli/bills.test.ts index 970d4634..7956637a 100644 --- a/test/e2e/cli/bills.test.ts +++ b/test/e2e/cli/bills.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from 'vitest' -import * as bills from '../../../src/cli/bills.ts' +import * as bills from '../../../src/cli/commands/bills.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' import { testCase } from './test-utils.ts' diff --git a/test/e2e/cli/templates.test.ts b/test/e2e/cli/templates.test.ts index 7becd94d..8d74d97d 100644 --- a/test/e2e/cli/templates.test.ts +++ b/test/e2e/cli/templates.test.ts @@ -3,7 +3,7 @@ import path from 'node:path' import { afterAll, beforeAll, describe, expect, it } from 'vitest' import type { TemplateContent } from '../../../src/apiTypes.ts' import { zip } from '../../../src/cli/helpers.ts' -import * as templates from '../../../src/cli/templates.ts' +import * as templates from '../../../src/cli/commands/templates.ts' import { Transloadit as TransloaditClient } from '../../../src/Transloadit.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts' From 6386b775cc95183f2f0e0d1da1f616c235205cb9 Mon Sep 17 00:00:00 2001 From: Kevin van Zonneveld Date: Thu, 4 Dec 2025 17:48:54 +0100 Subject: [PATCH 45/45] format --- src/cli/commands/assemblies.ts | 1 - test/e2e/cli/templates.test.ts | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/cli/commands/assemblies.ts b/src/cli/commands/assemblies.ts index 0a0e307d..bab5bbc2 100644 --- a/src/cli/commands/assemblies.ts +++ b/src/cli/commands/assemblies.ts @@ -23,7 +23,6 @@ import type { IOutputCtl } from '../OutputCtl.ts' import { ensureError, isErrnoException } from '../types.ts' import { AuthenticatedCommand } from './BaseCommand.ts' - // --- From assemblies.ts: Schemas and interfaces --- export interface AssemblyListOptions { before?: string diff --git 
a/test/e2e/cli/templates.test.ts b/test/e2e/cli/templates.test.ts index 8d74d97d..022f1cfc 100644 --- a/test/e2e/cli/templates.test.ts +++ b/test/e2e/cli/templates.test.ts @@ -2,8 +2,8 @@ import fsp from 'node:fs/promises' import path from 'node:path' import { afterAll, beforeAll, describe, expect, it } from 'vitest' import type { TemplateContent } from '../../../src/apiTypes.ts' -import { zip } from '../../../src/cli/helpers.ts' import * as templates from '../../../src/cli/commands/templates.ts' +import { zip } from '../../../src/cli/helpers.ts' import { Transloadit as TransloaditClient } from '../../../src/Transloadit.ts' import OutputCtl from './OutputCtl.ts' import type { OutputEntry } from './test-utils.ts'