diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..c1a6f66 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,4 @@ +{ + "singleQuote": true, + "trailingComma": "es5" +} diff --git a/package.json b/package.json index 3becb42..9cfe4d1 100644 --- a/package.json +++ b/package.json @@ -25,20 +25,21 @@ "scripts": { "prebuild": "rimraf dist", "build": "tsc -p tsconfig.build.json", - "test": "jest --coverage --verbose", + "test": "jest --coverage --verbose --testPathIgnorePatterns=e2e", "test:fixtures": "jest --testPathPattern=integration", - "test:watch": "jest --watch", + "test:e2e": "npm run build && jest --testPathPattern=e2e", + "test:watch": "jest --watch --testPathIgnorePatterns=e2e", "lint": "eslint '{src,test}/**/*.ts'", "lint:fix": "eslint '{src,test}/**/*.ts' --fix" }, "dependencies": { - "jscodeshift": "^17.3.0", - "commander": "^14.0.2", - "inquirer": "^8.2.5", "chalk": "^4.1.2", - "ora": "^5.4.1", + "commander": "^14.0.2", "glob": "^10.0.0", + "inquirer": "^8.2.5", "is-git-clean": "^1.1.0", + "jscodeshift": "^17.3.0", + "ora": "^5.4.1", "typescript": "~5.2.2" }, "devDependencies": { @@ -48,9 +49,9 @@ "@semantic-release/changelog": "^6.0.3", "@semantic-release/exec": "^6.0.3", "@semantic-release/git": "^10.0.1", - "@types/jscodeshift": "^0.11.0", "@types/inquirer": "^9.0.0", "@types/jest": "^29.5.14", + "@types/jscodeshift": "^0.11.0", "@types/node": "^20.0.0", "@typescript-eslint/eslint-plugin": "^8.19.1", "@typescript-eslint/parser": "^8.19.1", @@ -59,6 +60,7 @@ "globals": "^15.14.0", "jest": "^29.7.0", "jest-junit": "^16.0.0", + "prettier": "^3.7.4", "rimraf": "^5.0.0", "semantic-release": "^24.2.0", "ts-jest": "^29.2.5", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f44681a..460733d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -82,6 +82,9 @@ devDependencies: jest-junit: specifier: ^16.0.0 version: 16.0.0 + prettier: + specifier: ^3.7.4 + version: 3.7.4 rimraf: specifier: ^5.0.0 version: 5.0.10 @@ -4785,6 +4788,12 @@ packages: engines: {node: '>= 0.8.0'} dev: true + /prettier@3.7.4: + resolution: {integrity: sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==} + engines: {node: '>=14'} + hasBin: true + dev: true + /pretty-format@29.7.0: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} diff --git a/src/cli.ts b/src/cli.ts index ba9d9ed..3d9989e 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -25,6 +25,7 @@ program .option('-p, --print', 'Print transformed files to stdout, useful for development', false) .option('--verbose', 'Show more information about the transform process', false) .option('--parser ', 'Parser to use (tsx, ts, babel)', 'tsx') + .option('--allow-critical-errors', 'Allow writes even when critical validation errors are present', false) .action( async ( codemodArg: string | undefined, diff --git a/src/runner.ts b/src/runner.ts index 46e75d3..eb36fcb 100644 --- a/src/runner.ts +++ b/src/runner.ts @@ -8,7 +8,7 @@ import type { } from './types'; import type { Logger } from './utils/logger'; import type { TransformInfo } from './transforms'; -import { createFileProcessor } from './utils/file-processor'; +import { createFileProcessor, type FileInfo } from './utils/file-processor'; /** * Run the transformation on the target path @@ -24,47 +24,37 @@ export async function runTransform( const applyTransform = transformModule.applyTransform; if (!applyTransform) { - throw new Error(`Transform 
${transformInfo.name} does not export applyTransform function`); + throw new Error( + `Transform ${transformInfo.name} does not export applyTransform function` + ); } // Step 1: Discover files logger.startSpinner('Discovering files..'); const fileProcessor = createFileProcessor({ extensions: ['.ts', '.tsx'], - ignorePatterns: [ - '**/node_modules/**', - '**/dist/**', - '**/*.d.ts', - ], + ignorePatterns: ['**/node_modules/**', '**/dist/**', '**/*.d.ts'], }); const allFiles = await fileProcessor.discoverFiles(targetPath); logger.succeedSpinner(`Found ${allFiles.length} files`); - // Step 2: Filter for source framework files - logger.startSpinner('Analyzing source framework usage..'); - const sourceFiles = fileProcessor.filterSourceFiles(allFiles); - - if (sourceFiles.length === 0) { - logger.warnSpinner('No source framework files found'); - logger.info('No files contain source framework imports. Migration not needed.'); - return createEmptySummary(); - } - - logger.succeedSpinner(`${sourceFiles.length} files contain source framework imports`); - logger.subsection(`${allFiles.length - sourceFiles.length} files skipped (no source imports)`); - - logger.newline(); - - // Step 3: Transform files + // Step 2: Transform files logger.section('🔄 Transforming files...'); const results: TransformResult[] = []; let filesTransformed = 0; let totalErrors = 0; let totalWarnings = 0; - - for (const filePath of sourceFiles) { - const result = await transformFile(filePath, applyTransform, options, logger); + let sourceFilesCount = 0; + + for (const fileInfo of fileProcessor.filterSourceFiles(allFiles)) { + sourceFilesCount++; + const result = await transformFile( + fileInfo, + applyTransform, + options, + logger + ); results.push(result); if (result.transformed) { @@ -75,20 +65,47 @@ export async function runTransform( totalWarnings += result.warnings.length; } - // Step 4: Report summary + // Check if any files were found + if (sourceFilesCount === 0) { + logger.warnSpinner('No source framework files found'); + logger.info( + 'No files contain source framework imports. Migration not needed.' + ); + return createEmptySummary(); + } + + logger.succeedSpinner( + `${sourceFilesCount} files contain source framework imports` + ); + logger.subsection( + `${allFiles.length - sourceFilesCount} files skipped (no source imports)` + ); + logger.newline(); + + // Step 3: Report summary logger.newline(); logger.section('📊 Migration Summary'); if (filesTransformed > 0) { - logger.success(`${filesTransformed} file${filesTransformed > 1 ? 's' : ''} transformed successfully`); + logger.success( + `${filesTransformed} file${ + filesTransformed > 1 ? 's' : '' + } transformed successfully` + ); } - if (sourceFiles.length - filesTransformed > 0) { - logger.info(` ${sourceFiles.length - filesTransformed} file${sourceFiles.length - filesTransformed > 1 ? 's' : ''} skipped (no changes needed)`); + if (sourceFilesCount - filesTransformed > 0) { + logger.info( + ` ${sourceFilesCount - filesTransformed} file${ + sourceFilesCount - filesTransformed > 1 ? 's' : '' + } skipped (no changes needed)` + ); } if (totalWarnings > 0) { - logger.warn(`${totalWarnings} warning${totalWarnings > 1 ? 's' : ''} found`); + logger.warn( + `${totalWarnings} warning${totalWarnings > 1 ? 
's' : ''} found` + ); } if (totalErrors > 0) { @@ -114,11 +131,9 @@ export async function runTransform( } return { - filesProcessed: sourceFiles.length, + filesProcessed: sourceFilesCount, filesTransformed, - filesSkipped: sourceFiles.length - filesTransformed, - importsUpdated: 0, // TODO: Track this from transformers - mocksConfigured: 0, // TODO: Track this from transformers + filesSkipped: sourceFilesCount - filesTransformed, errors: totalErrors, warnings: totalWarnings, results, @@ -129,13 +144,16 @@ export async function runTransform( * Transform a single file */ async function transformFile( - filePath: string, - applyTransform: (source: string, options?: { skipValidation?: boolean; parser?: string }) => any, + fileInfo: FileInfo, + applyTransform: ( + source: string, + options?: { skipValidation?: boolean; parser?: string } + ) => any, options: CliOptions, logger: Logger ): Promise { const result: TransformResult = { - filePath, + filePath: fileInfo.path, transformed: false, changes: [], warnings: [], @@ -143,9 +161,6 @@ async function transformFile( }; try { - // Read source file - let source = await fs.readFile(filePath, 'utf-8'); - // Note: Preprocessing has been disabled because the parser fallback strategy // now uses ts/tsx parsers first, which handle TypeScript syntax correctly. // The preprocessing was breaking valid TypeScript generic syntax like: @@ -157,13 +172,15 @@ async function transformFile( // more carefully to avoid breaking valid TypeScript patterns. // Apply transformation - const transformOutput = applyTransform(source, { + const transformOutput = applyTransform(fileInfo.source, { parser: options.parser, }); // Check if code actually changed - if (transformOutput.code === source) { - logger.debug(` ⊘ ${path.relative(process.cwd(), filePath)} (no changes)`); + if (transformOutput.code === fileInfo.source) { + logger.debug( + ` ⊘ ${path.relative(process.cwd(), fileInfo.path)} (no changes)` + ); return result; } @@ -178,93 +195,56 @@ async function transformFile( result.warnings.push(`${warn.rule}: ${warn.message}`); }); - transformOutput.validation.criticalErrors.forEach((err: ValidationError) => { - result.errors.push(`[CRITICAL] ${err.rule}: ${err.message}`); - }); + transformOutput.validation.criticalErrors.forEach( + (err: ValidationError) => { + result.errors.push(`[CRITICAL] ${err.rule}: ${err.message}`); + } + ); + + // Skip write if there are critical errors (unless explicitly allowed) + const hasCriticalErrors = + transformOutput.validation.criticalErrors.length > 0; + if (hasCriticalErrors && !options.allowCriticalErrors) { + logger.error( + ` ✗ ${path.relative( + process.cwd(), + fileInfo.path + )} (skipped due to critical errors)` + ); + result.changes.push('Skipped (critical validation errors)'); + return result; + } // Handle --print flag (output to stdout instead of writing) if (options.print) { logger.info(`\n${'='.repeat(60)}`); - logger.info(`File: ${filePath}`); + logger.info(`File: ${fileInfo.path}`); logger.info('='.repeat(60)); console.log(transformOutput.code); logger.info('='.repeat(60)); result.changes.push('Printed to stdout'); } else if (!options.dry) { // Write transformed file - await fs.writeFile(filePath, transformOutput.code, 'utf-8'); + await fs.writeFile(fileInfo.path, transformOutput.code, 'utf-8'); result.changes.push('File updated'); - logger.success(` ${path.relative(process.cwd(), filePath)}`); + logger.success(` ${path.relative(process.cwd(), fileInfo.path)}`); } else { // Dry run - just report what would change 
result.changes.push('Would be updated (dry)');
-      logger.info(` ~ ${path.relative(process.cwd(), filePath)} (dry)`);
+      logger.info(` ~ ${path.relative(process.cwd(), fileInfo.path)} (dry)`);
     }
   } catch (error) {
     const errorMessage =
       error instanceof Error ? error.message : 'Unknown error occurred';
     result.errors.push(errorMessage);
-    logger.error(` ${path.relative(process.cwd(), filePath)}: ${errorMessage}`);
+    logger.error(
+      ` ${path.relative(process.cwd(), fileInfo.path)}: ${errorMessage}`
+    );
   }
 
   return result;
 }
 
-/**
- * Pre-process TypeScript import alias declarations
- * Converts: import X = jest.Y; → const X = jest.Y;
- * This syntax is TypeScript-specific and causes babel parser to fail
- */
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-function preprocessImportAliases(source: string): string {
-  // Match: import identifier = jest.something;
-  // or: import identifier = Sinon.something;
-  return source.replace(
-    /^import\s+([a-zA-Z_$][a-zA-Z0-9_$]*)\s*=\s*(jest|sinon|Sinon)\.([a-zA-Z_$][a-zA-Z0-9_$]*)\s*;/gm,
-    'const $1 = $2.$3;'
-  );
-}
-
-/**
- * Pre-process source to convert old-style type casts that confuse the parser
- * Converts: <Type>value → value as Type
- * This prevents parse errors in .tsx files where <> is ambiguous (JSX vs type cast)
- */
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-function preprocessTypeCasts(source: string): string {
-  // Match common patterns:
-  // <Type>identifier
-  // <Type>{} or <Type>{ ... }
-  // <Type>[] or <Type>[...]
-  // <Type>(...)
-
-  // Pattern 1: <Type>identifier or <Type>identifier.property
-  let result = source.replace(
-    /<([A-Z][a-zA-Z0-9_<>[\],\s|&]*)>([a-zA-Z_$][a-zA-Z0-9_$]*(?:\.[a-zA-Z_$][a-zA-Z0-9_$]*)*)/g,
-    '$2 as $1'
-  );
-
-  // Pattern 2: <Type>{} or <Type>{ ... }
-  result = result.replace(
-    /<([A-Z][a-zA-Z0-9_<>[\],\s|&]*)>(\{[^}]*\})/g,
-    '($2 as $1)'
-  );
-
-  // Pattern 3: <Type>[] or <Type>[...]
-  result = result.replace(
-    /<([A-Z][a-zA-Z0-9_<>[\],\s|&]*)>(\[[^\]]*\])/g,
-    '($2 as $1)'
-  );
-
-  // Pattern 4: <Type>(...)
- result = result.replace( - /<([A-Z][a-zA-Z0-9_<>[\],\s|&]*)>(\([^)]*\))/g, - '($2 as $1)' - ); - - return result; -} - /** * Create an empty migration summary */ @@ -273,8 +253,6 @@ function createEmptySummary(): MigrationSummary { filesProcessed: 0, filesTransformed: 0, filesSkipped: 0, - importsUpdated: 0, - mocksConfigured: 0, errors: 0, warnings: 0, results: [], diff --git a/src/transform.ts b/src/transform.ts index 0b14c86..ae0d1ed 100644 --- a/src/transform.ts +++ b/src/transform.ts @@ -1,7 +1,7 @@ import jscodeshift from 'jscodeshift'; import type { AnalysisContext, TransformOutput } from './types'; import { detectSuitesContext } from './analyzers/context-detector'; -import { detectRetrievals} from './analyzers/retrieval-detector'; +import { detectRetrievals } from './analyzers/retrieval-detector'; import { analyzeAllMockConfigurations } from './analyzers/stub-detector'; import { transformImports } from './transforms/import-transformer'; import { transformTestBed } from './transforms/testbed-transformer'; @@ -26,14 +26,18 @@ export function applyTransform( if (source.length === 0) { return { code: source, - validation: { success: true, errors: [], warnings: [], criticalErrors: [] }, + validation: { + success: true, + errors: [], + warnings: [], + criticalErrors: [], + }, }; } // Parse with fallback strategy const { j, root } = parseSourceWithFallback(source, options?.parser); - // Phase 1: Analysis const context: AnalysisContext = { isSuitesContext: detectSuitesContext(source), @@ -71,10 +75,7 @@ export function applyTransform( cleanupObsoleteTypeCasts(j, root); // Phase 6: Post-transformation validation - const transformedSource = root.toSource({ - quote: 'single', - trailingComma: true, - }); + const transformedSource = root.toSource(); const validation = options?.skipValidation ? { success: true, errors: [], warnings: [], criticalErrors: [] } @@ -141,6 +142,6 @@ function parseSourceWithFallback( // All parsers failed - throw detailed error throw new Error( `Failed to parse source code with any available parser (tried: ${parsersToTry.join(', ')}). 
` +
-    `Last error: ${lastError?.message || 'Unknown error'}`
+      `Last error: ${lastError?.message || 'Unknown error'}`
   );
 }
diff --git a/src/types.ts b/src/types.ts
index 9496a72..5bb4b5d 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -9,6 +9,7 @@ export interface CliOptions {
   parser: string;
   print: boolean;
   verbose: boolean;
+  allowCriticalErrors: boolean;
 }
 
 /**
@@ -106,8 +107,6 @@ export interface MigrationSummary {
   filesProcessed: number;
   filesTransformed: number;
   filesSkipped: number;
-  importsUpdated: number;
-  mocksConfigured: number;
   errors: number;
   warnings: number;
   results: TransformResult[];
diff --git a/src/utils/file-processor.ts b/src/utils/file-processor.ts
index d29e8cf..fafa8e0 100644
--- a/src/utils/file-processor.ts
+++ b/src/utils/file-processor.ts
@@ -8,6 +8,11 @@ export interface FileProcessorOptions {
   sourceImportPattern?: RegExp;
 }
 
+export interface FileInfo {
+  path: string;
+  source: string;
+}
+
 export class FileProcessor {
   private options: FileProcessorOptions;
 
@@ -52,9 +57,7 @@ export class FileProcessor {
   /**
    * Glob for files in a directory
    */
   private async globFiles(directory: string): Promise<string[]> {
-    const patterns = this.options.extensions.map(
-      (ext) => `**/*${ext}`
-    );
+    const patterns = this.options.extensions.map((ext) => `**/*${ext}`);
 
     const files: string[] = [];
 
@@ -108,12 +111,16 @@ export class FileProcessor {
   /**
    * Filter files that contain source framework imports
    * Default pattern matches Automock imports for backward compatibility
+   * Yields file info objects with path and source content to avoid double reads
+   * Uses a generator for memory efficiency with large file sets
    */
-  filterSourceFiles(files: string[]): string[] {
-    return files.filter((filePath) => {
+  *filterSourceFiles(files: string[]): Generator<FileInfo> {
+    for (const filePath of files) {
       const content = this.readFile(filePath);
-      return this.hasSourceImport(content);
-    });
+      if (this.hasSourceImport(content)) {
+        yield { path: filePath, source: content };
+      }
+    }
   }
 
   /**
@@ -121,7 +128,8 @@ export class FileProcessor {
    * Default pattern matches Automock imports for backward compatibility
    */
   private hasSourceImport(content: string): boolean {
-    const importPattern = this.options.sourceImportPattern ||
+    const importPattern =
+      this.options.sourceImportPattern ||
       /@automock\/(jest|sinon|core)['"]|from\s+['"]@automock\/(jest|sinon|core)['"]/;
     return importPattern.test(content);
   }
diff --git a/src/utils/logger.ts b/src/utils/logger.ts
index cab7266..b5e9297 100644
--- a/src/utils/logger.ts
+++ b/src/utils/logger.ts
@@ -124,15 +124,11 @@ export class Logger {
    */
   summary(stats: {
     filesTransformed: number;
-    importsUpdated: number;
-    mocksConfigured: number;
     errors: number;
     warnings: number;
   }): void {
     this.section('✅ Migration complete!');
     this.subsection(`${stats.filesTransformed} files transformed`);
-    this.subsection(`${stats.importsUpdated} imports updated`);
-    this.subsection(`${stats.mocksConfigured} mocks configured`);
     if (stats.errors > 0) {
       this.subsection(chalk.red(`${stats.errors} errors`));
     }
diff --git a/test/__snapshots__/parse-errors.spec.ts.snap b/test/__snapshots__/parse-errors.spec.ts.snap
index 390ccdf..231c82b 100644
--- a/test/__snapshots__/parse-errors.spec.ts.snap
+++ b/test/__snapshots__/parse-errors.spec.ts.snap
@@ -1,7 +1,7 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP
 
 exports[`Parse Error Handling Category 1: Multi-Variable Declarations with Generics (18 files) should parse and transform files with multi-variable let declarations containing jest.Mocked generics 1`] = `
-"import 
{ TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; let service: InternalTransactionUpdateService, chargeService: Mocked, @@ -24,7 +24,7 @@ describe('Test', () => { `; exports[`Parse Error Handling Category 2: TypeScript Type Assertions in Generics (3 files) should parse and transform files with type assertions in expect().resolves.toStrictEqual 1`] = ` -"import { TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; describe('TypeAssertionTests', () => { let service: PaymentService; @@ -54,7 +54,7 @@ describe('TypeAssertionTests', () => { `; exports[`Parse Error Handling Category 3: Function Call Syntax Issues (3 files) should parse and transform files with spread syntax in function parameters 1`] = ` -"import { TestBed } from '@suites/unit'; +"import { TestBed } from "@suites/unit"; describe('SpreadParamTests', () => { const payoutsFactory = ( @@ -78,7 +78,7 @@ describe('SpreadParamTests', () => { `; exports[`Parse Error Handling Real-world Integration: Multiple Parse Error Categories should parse and transform complex files combining multiple error patterns 1`] = ` -"import { TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; let service: MyService, mockRepo: Mocked, diff --git a/test/e2e/cli-e2e.spec.ts b/test/e2e/cli-e2e.spec.ts new file mode 100644 index 0000000..d4d34bc --- /dev/null +++ b/test/e2e/cli-e2e.spec.ts @@ -0,0 +1,219 @@ +/** + * End-to-end tests for the CLI + * + * These tests run the actual CLI command on fixture files + * to verify the complete transformation pipeline works correctly. + */ + +import { execSync } from 'child_process'; +import { mkdtemp, writeFile, readFile, rm } from 'fs/promises'; +import { join } from 'path'; +import { tmpdir } from 'os'; +import { loadFixturePair } from '../utils/fixture-loader'; + +describe('CLI E2E Tests', () => { + let tempDir: string; + + beforeEach(async () => { + // Create a temporary directory for each test + tempDir = await mkdtemp(join(tmpdir(), 'codemod-e2e-')); + }); + + afterEach(async () => { + // Clean up temporary directory + try { + await rm(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + }); + + describe('Basic Transformations', () => { + it('should transform a simple fixture file via CLI', async () => { + const fixtures = loadFixturePair('complex-impl'); + + // Write input file to temp directory + const inputFile = join(tempDir, 'test.ts'); + await writeFile(inputFile, fixtures.input, 'utf-8'); + + // Run CLI in dry mode + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + const result = execSync( + `node ${cliPath} automock/2/to-suites-v3 ${tempDir} --dry --force`, + { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + } + ); + + // Verify CLI ran successfully + expect(result).toBeDefined(); + expect(result).toMatch(/file.*transformed|files transformed/i); + }); + + it('should actually transform files when not in dry mode', async () => { + const fixtures = loadFixturePair('complex-impl'); + + // Write input file to temp directory + const inputFile = join(tempDir, 'test.ts'); + await writeFile(inputFile, fixtures.input, 'utf-8'); + + // Run CLI (not in dry mode, with force to skip git check) + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + execSync(`node ${cliPath} automock/2/to-suites-v3 ${tempDir} --force`, { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + }); + + // Read the 
transformed file + const transformedContent = await readFile(inputFile, 'utf-8'); + + // Verify transformation occurred + expect(transformedContent).toContain('@suites/unit'); + expect(transformedContent).not.toContain('@automock/jest'); + expect(transformedContent).toContain('TestBed.solitary'); + expect(transformedContent).not.toContain('TestBed.create'); + expect(transformedContent).toContain('.impl('); + expect(transformedContent).not.toContain('.using('); + }); + + it('should transform multiple files in a directory', async () => { + const fixtures1 = loadFixturePair('token-injection'); + const fixtures2 = loadFixturePair('complex-impl'); + + // Write multiple input files + await writeFile(join(tempDir, 'test1.ts'), fixtures1.input, 'utf-8'); + await writeFile(join(tempDir, 'test2.ts'), fixtures2.input, 'utf-8'); + + // Run CLI + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + const result = execSync( + `node ${cliPath} automock/2/to-suites-v3 ${tempDir} --force`, + { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + } + ); + + // Verify both files were transformed + const transformed1 = await readFile(join(tempDir, 'test1.ts'), 'utf-8'); + const transformed2 = await readFile(join(tempDir, 'test2.ts'), 'utf-8'); + + expect(transformed1).toContain('@suites/unit'); + expect(transformed2).toContain('@suites/unit'); + expect(result).toContain('files transformed'); + }); + }); + + describe('CLI Options', () => { + it('should respect --dry flag and not modify files', async () => { + const fixtures = loadFixturePair('complex-impl'); + + const inputFile = join(tempDir, 'test.ts'); + await writeFile(inputFile, fixtures.input, 'utf-8'); + + // Run CLI in dry mode + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + execSync( + `node ${cliPath} automock/2/to-suites-v3 ${tempDir} --dry --force`, + { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + } + ); + + // Verify file was NOT modified + const content = await readFile(inputFile, 'utf-8'); + expect(content).toBe(fixtures.input); + expect(content).toContain('@automock/jest'); + }); + + it('should respect --print flag and output to stdout', async () => { + const fixtures = loadFixturePair('complex-impl'); + + const inputFile = join(tempDir, 'test.ts'); + await writeFile(inputFile, fixtures.input, 'utf-8'); + + // Run CLI with print flag + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + const result = execSync( + `node ${cliPath} automock/2/to-suites-v3 ${inputFile} --print --force`, + { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + } + ); + + // Verify output contains transformed code + expect(result).toContain('@suites/unit'); + expect(result).toContain('TestBed.solitary'); + + // Verify file was NOT modified (print mode doesn't write) + const content = await readFile(inputFile, 'utf-8'); + expect(content).toBe(fixtures.input); + }); + + it('should handle --verbose flag', async () => { + const fixtures = loadFixturePair('complex-impl'); + + const inputFile = join(tempDir, 'test.ts'); + await writeFile(inputFile, fixtures.input, 'utf-8'); + + // Run CLI with verbose flag + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + const result = execSync( + `node ${cliPath} automock/2/to-suites-v3 ${tempDir} --verbose --force --dry`, + { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + } + ); + + // Verify verbose output is present + expect(result).toBeDefined(); + // jscodeshift will output processing information + }); + }); + + 
describe('Error Handling', () => { + it('should exit with error code for invalid codemod', () => { + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + + expect(() => { + execSync(`node ${cliPath} invalid/codemod ${tempDir} --force`, { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + }); + }).toThrow(); + }); + + it('should exit with error code for non-existent path', () => { + const cliPath = join(__dirname, '../../bin/suites-codemod.js'); + + try { + execSync( + `node ${cliPath} automock/2/to-suites-v3 /non/existent/path --force`, + { + cwd: process.cwd(), + encoding: 'utf-8', + stdio: 'pipe', + } + ); + // If we get here, the command didn't throw - that's also acceptable + // as jscodeshift might handle missing paths gracefully + } catch (error: any) { + // Command threw an error, which is expected + expect(error).toBeDefined(); + // Verify it's an error (exit code non-zero or error message) + expect(error.status || error.message).toBeDefined(); + } + }); + }); +}); diff --git a/test/integration/__snapshots__/snapshot-tests.spec.ts.snap b/test/integration/__snapshots__/snapshot-tests.spec.ts.snap index 707dbc2..593115c 100644 --- a/test/integration/__snapshots__/snapshot-tests.spec.ts.snap +++ b/test/integration/__snapshots__/snapshot-tests.spec.ts.snap @@ -1,7 +1,7 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`Snapshot Tests Basic Examples from Specification should transform complex mock with .impl() and retrieval 1`] = ` -"import { TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; import { jest } from '@jest/globals'; describe('OrderService', () => { @@ -14,11 +14,11 @@ describe('OrderService', () => { .mock(OrderRepository) .impl(stubFn => ({ findById: stubFn(), - save: stubFn().mockResolvedValue(true), + save: stubFn().mockResolvedValue(true) })) .mock(PaymentGateway) .impl(stubFn => ({ - charge: stubFn().mockResolvedValue({ success: true }), + charge: stubFn().mockResolvedValue({ success: true }) })) .compile(); @@ -66,7 +66,7 @@ describe('UserService', () => { `; exports[`Snapshot Tests Basic Examples from Specification should transform token injection 1`] = ` -"import { TestBed } from '@suites/unit'; +"import { TestBed } from "@suites/unit"; describe('ApiService', () => { let service: ApiService; @@ -86,7 +86,7 @@ describe('ApiService', () => { `; exports[`Snapshot Tests Edge Cases should handle various edge cases correctly 1`] = ` -"import { TestBed } from '@suites/unit'; +"import { TestBed } from "@suites/unit"; describe('Edge Cases', () => { // Already async function @@ -130,7 +130,7 @@ describe('Edge Cases', () => { `; exports[`Snapshot Tests Mixed .impl() and .final() should correctly apply .impl() to retrieved mocks and .final() to others 1`] = ` -"import { TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; describe('NotificationService', () => { let service: NotificationService; @@ -141,7 +141,7 @@ describe('NotificationService', () => { .mock(EmailService) .impl(stubFn => ({ send: stubFn(), - validate: stubFn(), + validate: stubFn() })) .mock(Config) .final({ @@ -156,7 +156,7 @@ describe('NotificationService', () => { }) .mock(Logger) .impl(stubFn => ({ - log: stubFn(), + log: stubFn() })) .compile(); @@ -176,7 +176,7 @@ describe('NotificationService', () => { `; exports[`Snapshot Tests Multiple Test Hooks should transform TestBed in beforeAll, beforeEach, and test blocks 1`] = ` -"import { TestBed } from '@suites/unit'; +"import { TestBed 
} from "@suites/unit"; describe('TaskService', () => { let service: TaskService; @@ -185,7 +185,7 @@ describe('TaskService', () => { const { unit } = await TestBed.solitary(TaskService) .mock(TaskRepository) .impl(stubFn => ({ - findAll: stubFn(), + findAll: stubFn() })) .compile(); @@ -206,7 +206,7 @@ describe('TaskService', () => { const { unit, unitRef } = await TestBed.solitary(TaskService) .mock(TaskRepository) .impl(stubFn => ({ - create: stubFn(), + create: stubFn() })) .compile(); @@ -228,7 +228,7 @@ describe('TaskService', () => { `; exports[`Snapshot Tests Sinon Framework should transform Sinon-based tests 1`] = ` -"import { TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; describe('PaymentService', () => { let service: PaymentService; @@ -240,12 +240,12 @@ describe('PaymentService', () => { .mock(PaymentGateway) .impl(stubFn => ({ charge: stubFn(), - refund: stubFn(), + refund: stubFn() })) .mock(Logger) .impl(stubFn => ({ info: stubFn(), - error: stubFn(), + error: stubFn() })) .compile(); @@ -267,7 +267,7 @@ describe('PaymentService', () => { `; exports[`Snapshot Tests Type Cast Cleanup should remove obsolete type casts 1`] = ` -"import { TestBed, type Mocked } from '@suites/unit'; +"import { TestBed, type Mocked } from "@suites/unit"; describe('UserController', () => { let controller: UserController; @@ -303,7 +303,7 @@ describe('UserController', () => { `; exports[`Snapshot Tests UnitReference Usage should handle UnitReference imports and usage 1`] = ` -"import { TestBed, type UnitReference, type Mocked } from '@suites/unit'; +"import { TestBed, type UnitReference, type Mocked } from "@suites/unit"; describe('ProductService', () => { let service: ProductService; @@ -316,7 +316,7 @@ describe('ProductService', () => { .impl(stubFn => ({ find: stubFn(), save: stubFn(), - delete: stubFn(), + delete: stubFn() })) .compile();