diff --git a/package.json b/package.json index e8245dc..90e75dc 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,10 @@ "types": "./dist/core/payments/index.d.ts", "default": "./dist/core/payments/index.js" }, + "./core/piece": { + "types": "./dist/core/piece/index.d.ts", + "default": "./dist/core/piece/index.js" + }, "./core/synapse": { "types": "./dist/core/synapse/index.d.ts", "default": "./dist/core/synapse/index.js" @@ -109,8 +113,8 @@ "homepage": "https://github.com/filecoin-project/filecoin-pin#readme", "dependencies": { "@clack/prompts": "^0.11.0", - "@filoz/synapse-core": "^0.1.3", - "@filoz/synapse-sdk": "^0.36.0", + "@filoz/synapse-core": "^0.1.4", + "@filoz/synapse-sdk": "^0.36.1", "@helia/unixfs": "^6.0.1", "@ipld/car": "^5.4.2", "commander": "^14.0.1", diff --git a/src/cli.ts b/src/cli.ts index 2313dd7..6956eee 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -6,6 +6,7 @@ import { addCommand } from './commands/add.js' import { dataSetCommand } from './commands/data-set.js' import { importCommand } from './commands/import.js' import { paymentsCommand } from './commands/payments.js' +import { rmCommand } from './commands/rm.js' import { serverCommand } from './commands/server.js' import { checkForUpdate, type UpdateCheckStatus } from './common/version-check.js' import { version as packageVersion } from './core/utils/version.js' @@ -24,6 +25,7 @@ program.addCommand(paymentsCommand) program.addCommand(dataSetCommand) program.addCommand(importCommand) program.addCommand(addCommand) +program.addCommand(rmCommand) // Default action - show help if no command specified program.action(() => { diff --git a/src/commands/rm.ts b/src/commands/rm.ts new file mode 100644 index 0000000..780af67 --- /dev/null +++ b/src/commands/rm.ts @@ -0,0 +1,19 @@ +import { Command } from 'commander' +import { runRmPiece } from '../rm/index.js' +import { addAuthOptions } from '../utils/cli-options.js' + +export const rmCommand = new Command('rm') + .description('Remove a Piece 
from a DataSet') + .requiredOption('--piece <cid>', 'Piece CID to remove') + .requiredOption('--data-set <id>', 'DataSet ID to remove the piece from') + .option('--wait-for-confirmation', 'Wait for transaction confirmation before exiting') + .action(async (options) => { + try { + await runRmPiece(options) + } catch { + // Error already displayed by clack UI in runRmPiece + process.exit(1) + } + }) + +addAuthOptions(rmCommand) diff --git a/src/core/data-set/calculate-actual-storage.ts b/src/core/data-set/calculate-actual-storage.ts index b1b1637..974f668 100644 --- a/src/core/data-set/calculate-actual-storage.ts +++ b/src/core/data-set/calculate-actual-storage.ts @@ -2,9 +2,9 @@ import type { Synapse } from '@filoz/synapse-sdk' import PQueue from 'p-queue' import type { Logger } from 'pino' import { createStorageContextFromDataSetId } from '../synapse/storage-context-helper.js' -import type { ProgressEvent, ProgressEventHandler } from '../utils/types.js' +import type { ProgressEvent, ProgressEventHandler, Warning } from '../utils/types.js' import { getDataSetPieces } from './get-data-set-pieces.js' -import type { DataSetSummary, DataSetWarning } from './types.js' +import type { DataSetSummary } from './types.js' export interface ActualStorageResult { /** Total storage in bytes across all active data sets */ totalBytes: bigint /** Number of pieces across all data sets */ pieceCount: number /** Number of data sets successfully processed */ dataSetsProcessed: number /** Number of data sets that failed to process */ dataSetsFailed: number /** Whether the calculation timed out */ timedOut?: boolean /** Non-fatal warnings encountered during calculation */ - warnings: DataSetWarning[] + warnings: Warning[] } export type ActualStorageProgressEvents = ProgressEvent< @@ -101,7 +101,7 @@ export async function calculateActualStorage( const maxParallelPerProvider = Math.max(1, options?.maxParallelPerProvider ?? 
10) const onProgress = options?.onProgress - const warnings: DataSetWarning[] = [] + const warnings: Warning[] = [] let totalBytes = 0n let pieceCount = 0 let dataSetsProcessed = 0 diff --git a/src/core/data-set/get-data-set-pieces.ts b/src/core/data-set/get-data-set-pieces.ts index 82b8f66..847c035 100644 --- a/src/core/data-set/get-data-set-pieces.ts +++ b/src/core/data-set/get-data-set-pieces.ts @@ -7,15 +7,20 @@ */ import { getSizeFromPieceCID } from '@filoz/synapse-core/piece' -import { METADATA_KEYS, type StorageContext, type Synapse, WarmStorageService } from '@filoz/synapse-sdk' +import { + type DataSetPieceData, + METADATA_KEYS, + PDPServer, + PDPVerifier, + type StorageContext, + type Synapse, + WarmStorageService, +} from '@filoz/synapse-sdk' +import { reconcilePieceStatus } from '../piece/piece-status.js' +import type { Warning } from '../utils/types.js' import { isStorageContextWithDataSetId } from './type-guards.js' -import type { - DataSetPiecesResult, - DataSetWarning, - GetDataSetPiecesOptions, - PieceInfo, - StorageContextWithDataSetId, -} from './types.js' +import type { DataSetPiecesResult, GetDataSetPiecesOptions, PieceInfo, StorageContextWithDataSetId } from './types.js' +import { PieceStatus } from './types.js' /** * Get all pieces for a dataset from a StorageContext @@ -57,15 +62,58 @@ export async function getDataSetPieces( } const pieces: PieceInfo[] = [] - const warnings: DataSetWarning[] = [] + const warnings: Warning[] = [] + + // call PDPVerifier.getScheduledRemovals to get the list of pieces that are scheduled for removal + let scheduledRemovals: number[] = [] + let pdpServerPieces: DataSetPieceData[] | null = null + try { + const warmStorage = await WarmStorageService.create(synapse.getProvider(), synapse.getWarmStorageAddress()) + const pdpVerifier = new PDPVerifier(synapse.getProvider(), warmStorage.getPDPVerifierAddress()) + scheduledRemovals = await pdpVerifier.getScheduledRemovals(storageContext.dataSetId) + try { + const 
providerInfo = await synapse.getProviderInfo(storageContext.provider.serviceProvider) + const pdpServer = new PDPServer(null, providerInfo.products?.PDP?.data?.serviceURL ?? '') + const dataSet = await pdpServer.getDataSet(storageContext.dataSetId) + pdpServerPieces = dataSet.pieces + } catch (error) { + logger?.warn({ error }, 'Failed to fetch provider data for scheduled removals and orphan detection') + warnings.push({ + code: 'PROVIDER_DATA_UNAVAILABLE', + message: 'Failed to fetch provider data; orphan detection disabled', + context: { dataSetId: storageContext.dataSetId, error: String(error) }, + }) + } + } catch (error) { + logger?.warn({ error }, 'Failed to get scheduled removals') + warnings.push({ + code: 'SCHEDULED_REMOVALS_UNAVAILABLE', + message: 'Failed to get scheduled removals', + context: { dataSetId: storageContext.dataSetId, error: String(error) }, + }) + } // Use the async generator to fetch all pieces try { const getPiecesOptions = { ...(signal && { signal }) } + const providerPiecesById = pdpServerPieces ? new Map(pdpServerPieces.map((piece) => [piece.pieceId, piece])) : null for await (const piece of storageContext.getPieces(getPiecesOptions)) { const pieceId = piece.pieceId const pieceCid = piece.pieceCid - const pieceInfo: PieceInfo = { pieceId, pieceCid: pieceCid.toString() } + const { status, warning } = reconcilePieceStatus({ + pieceId, + pieceCid, + scheduledRemovals, + providerPiecesById, + }) + const pieceInfo: PieceInfo = { + pieceId, + pieceCid: pieceCid.toString(), + status, + } + if (warning) { + warnings.push(warning) + } // Calculate piece size from CID try { @@ -79,6 +127,24 @@ export async function getDataSetPieces( pieces.push(pieceInfo) } + if (providerPiecesById !== null) { + // reconcilePieceStatus removes provider matches as we stream on-chain pieces. + // Remaining entries are only reported by the provider, which are off-chain orphans. 
+ for (const piece of providerPiecesById.values()) { + // add the rest of the pieces to the pieces list + pieces.push({ + pieceId: piece.pieceId, + pieceCid: piece.pieceCid.toString(), + status: PieceStatus.OFFCHAIN_ORPHANED, + }) + warnings.push({ + code: 'OFFCHAIN_ORPHANED', + message: 'Piece is reported by provider but not on-chain', + context: { pieceId: piece.pieceId, pieceCid: piece.pieceCid.toString() }, + }) + } + } + pieces.sort((a, b) => a.pieceId - b.pieceId) } catch (error) { if (error instanceof Error && error.name === 'AbortError') { throw error @@ -122,7 +188,7 @@ async function enrichPiecesWithMetadata( synapse: Synapse, storageContext: StorageContextWithDataSetId, pieces: PieceInfo[], - warnings: DataSetWarning[], + warnings: Warning[], logger?: GetDataSetPiecesOptions['logger'] ): Promise { const dataSetId = storageContext.dataSetId diff --git a/src/core/data-set/types.ts b/src/core/data-set/types.ts index 5571703..fc2651c 100644 --- a/src/core/data-set/types.ts +++ b/src/core/data-set/types.ts @@ -10,6 +10,24 @@ import type { EnhancedDataSetInfo, ProviderInfo, StorageContext } from '@filoz/synapse-sdk' import type { Logger } from 'pino' +import type { Warning } from '../utils/types.js' + +/** + * Status of the piece, e.g. "pending removal", "active", "orphaned" + * + * - PENDING_REMOVAL: the piece is scheduled for deletion, but still showing on chain + * - ACTIVE: the piece is active, onchain and known by the provider + * - ONCHAIN_ORPHANED: the piece is not known by the provider, but still on chain + * - OFFCHAIN_ORPHANED: the piece is known by the provider, but not on chain + * + * The orphaned states should not happen, but have been observed and should be logged and displayed to the user. 
+ */ +export enum PieceStatus { + ACTIVE = 'ACTIVE', + PENDING_REMOVAL = 'PENDING_REMOVAL', + ONCHAIN_ORPHANED = 'ONCHAIN_ORPHANED', + OFFCHAIN_ORPHANED = 'OFFCHAIN_ORPHANED', +} /** * Information about a single piece in a dataset @@ -19,6 +37,7 @@ export interface PieceInfo { pieceId: number /** Piece Commitment (CommP) as string */ pieceCid: string + status: PieceStatus /** Root IPFS CID (from metadata, if available) */ rootIpfsCid?: string /** Piece size in bytes (if available) */ @@ -38,19 +57,7 @@ export interface DataSetPiecesResult { /** Total size of all pieces in bytes (sum of individual piece sizes) */ totalSizeBytes?: bigint /** Non-fatal warnings encountered during retrieval */ - warnings?: DataSetWarning[] -} - -/** - * Structured warning for non-fatal issues - */ -export interface DataSetWarning { - /** Machine-readable warning code (e.g., 'METADATA_FETCH_FAILED') */ - code: string - /** Human-readable warning message */ - message: string - /** Additional context data (e.g., { pieceId: 123, dataSetId: 456 }) */ - context?: Record + warnings?: Warning[] } /** diff --git a/src/core/piece/index.ts b/src/core/piece/index.ts new file mode 100644 index 0000000..0c6b7e4 --- /dev/null +++ b/src/core/piece/index.ts @@ -0,0 +1 @@ +export * from './remove-piece.js' diff --git a/src/core/piece/piece-status.ts b/src/core/piece/piece-status.ts new file mode 100644 index 0000000..c971a2b --- /dev/null +++ b/src/core/piece/piece-status.ts @@ -0,0 +1,71 @@ +import type { DataSetPieceData } from '@filoz/synapse-sdk' +import { PieceStatus } from '../data-set/types.js' +import type { Warning } from '../utils/types.js' + +interface PieceStatusContext { + pieceId: number + pieceCid: unknown + /** + * List of pieceIds that are scheduled for removal. + * + * This list is obtained from the PDPVerifier.getScheduledRemovals() method. + */ + scheduledRemovals: number[] + /** + * Map of provider-reported pieces keyed by pieceId. 
+ * + * This map is mutated: when we confirm a piece is both on-chain and reported + * by the provider, we delete it so leftovers represent provider-only pieces. + */ + providerPiecesById: Map | null +} + +interface PieceStatusResult { + status: PieceStatus + warning?: Warning +} + +/** + * Reconcile a piece's status across the two data sources we have: + * + * - On-chain: StorageContext.getPieces() (source of truth for what the PDP verifier knows) + * - Provider-reported: PDPServer.getDataSet() (what the storage provider says it stores) + * + * https://github.com/filecoin-project/curio/issues/815 showed these can drift. This helper documents the rules we apply + * to flag mismatches without blocking the listing flow: + * + * 1. If PDPVerifier marked the piece for removal, treat as PENDING_REMOVAL. + * 2. If provider data is unavailable, assume ACTIVE (best effort). + * 3. If provider reports the piece, treat as ACTIVE and remove it from the map so + * any leftover entries become OFFCHAIN_ORPHANED later. + * 4. Otherwise, the piece is on-chain but missing from the provider => ONCHAIN_ORPHANED. + * + * The optional warning conveys orphan cases to callers for user-facing messaging. + */ +export function reconcilePieceStatus(context: PieceStatusContext): PieceStatusResult { + const { pieceId, pieceCid, scheduledRemovals, providerPiecesById } = context + + if (scheduledRemovals.includes(pieceId)) { + return { status: PieceStatus.PENDING_REMOVAL } + } + + if (providerPiecesById === null) { + // No provider data to compare against; assume the on-chain view is accurate. + return { status: PieceStatus.ACTIVE } + } + + if (providerPiecesById.has(pieceId)) { + // Provider matches on-chain; remove so leftovers can be flagged as off-chain orphans. 
+ providerPiecesById.delete(pieceId) + return { status: PieceStatus.ACTIVE } + } + + return { + status: PieceStatus.ONCHAIN_ORPHANED, + warning: { + code: 'ONCHAIN_ORPHANED', + message: 'Piece is on-chain but the provider does not report it', + context: { pieceId, pieceCid }, + }, + } +} diff --git a/src/core/piece/remove-piece.ts b/src/core/piece/remove-piece.ts new file mode 100644 index 0000000..736e9a5 --- /dev/null +++ b/src/core/piece/remove-piece.ts @@ -0,0 +1,127 @@ +import type { StorageContext, Synapse } from '@filoz/synapse-sdk' +import type { Logger } from 'pino' +import { getErrorMessage } from '../utils/errors.js' +import type { ProgressEvent, ProgressEventHandler } from '../utils/types.js' + +/** + * Progress events emitted during piece removal + * + * These events allow callers to track the removal process: + * - submitting: Transaction is being submitted to blockchain + * - submitted: Transaction submitted successfully, txHash available + * - confirming: Waiting for transaction confirmation (if waitForConfirmation=true) + * - confirmation-failed: Confirmation wait timed out (non-fatal, tx may still succeed) + * - complete: Removal process finished + * + * Note: Errors are propagated via thrown exceptions, not events (similar to upload pattern) + */ +export type RemovePieceProgressEvents = + | ProgressEvent<'remove-piece:submitting', { pieceCid: string; dataSetId: number }> + | ProgressEvent<'remove-piece:submitted', { pieceCid: string; dataSetId: number; txHash: `0x${string}` | string }> + | ProgressEvent<'remove-piece:confirming', { pieceCid: string; dataSetId: number; txHash: `0x${string}` | string }> + | ProgressEvent< + 'remove-piece:confirmation-failed', + { pieceCid: string; dataSetId: number; txHash: `0x${string}` | string; message: string } + > + | ProgressEvent<'remove-piece:complete', { txHash: `0x${string}` | string; confirmed: boolean }> + +/** + * Number of block confirmations to wait for when waitForConfirmation=true + */ +const 
WAIT_CONFIRMATIONS = 1 + +/** + * Timeout in milliseconds for waiting for transaction confirmation + * Set to 2 minutes - generous default for Calibration network finality + */ +const WAIT_TIMEOUT_MS = 2 * 60 * 1000 + +/** + * Base options for piece removal + */ +interface RemovePieceOptionsBase { + /** Initialized Synapse SDK instance (required when waitForConfirmation is true)*/ + synapse?: Synapse | undefined + /** Optional progress event handler for tracking removal status */ + onProgress?: ProgressEventHandler | undefined + /** Whether to wait for transaction confirmation before returning (default: false) */ + waitForConfirmation?: boolean | undefined + /** Optional logger for tracking removal operations */ + logger?: Logger | undefined +} + +interface RemovePieceOptionsWithWaitForConfirmation extends RemovePieceOptionsBase { + waitForConfirmation: true + synapse: Synapse +} + +export type RemovePieceOptions = RemovePieceOptionsBase | RemovePieceOptionsWithWaitForConfirmation + +/** + * Remove a piece from a Data Set + * + * @example + * ```typescript + * const txHash = await removePiece('baga...', storageContext, { + * synapse, + * onProgress: (event) => console.log(event.type), + * waitForConfirmation: true + * }) + * ``` + * + * Process: + * 1. Submit the transaction via storageContext.deletePiece + * 2. Optionally wait for confirmation using Synapse provider + * 3. 
Emit progress events for each stage + * + * @param pieceCid - Piece CID to remove + * @param storageContext - Storage context bound to a Data Set + * @param options - Callbacks and confirmation settings (synapse required if waiting) + * @returns Transaction hash of the removal + */ +export async function removePiece( + pieceCid: string, + storageContext: StorageContext, + options: RemovePieceOptions +): Promise<`0x${string}` | string> { + const { onProgress, waitForConfirmation } = options + const dataSetId = storageContext.dataSetId + + if (dataSetId == null) { + throw new Error( + 'Storage context must be bound to a Data Set before removing pieces. Use createStorageContext with dataset.useExisting to bind to a Data Set.' + ) + } + if (waitForConfirmation === true && !isWaitForConfirmationOptions(options)) { + throw new Error('A Synapse instance is required when waitForConfirmation is true') + } + + onProgress?.({ type: 'remove-piece:submitting', data: { pieceCid, dataSetId } }) + const txHash = await storageContext.deletePiece(pieceCid) + onProgress?.({ type: 'remove-piece:submitted', data: { pieceCid, dataSetId, txHash } }) + + let isConfirmed = false + if (isWaitForConfirmationOptions(options)) { + const { synapse } = options + onProgress?.({ type: 'remove-piece:confirming', data: { pieceCid, dataSetId, txHash } }) + try { + await synapse.getProvider().waitForTransaction(txHash, WAIT_CONFIRMATIONS, WAIT_TIMEOUT_MS) + isConfirmed = true + } catch (error: unknown) { + // Confirmation timeout is non-fatal - transaction may still succeed + onProgress?.({ + type: 'remove-piece:confirmation-failed', + data: { pieceCid, dataSetId, txHash, message: getErrorMessage(error) }, + }) + } + } + + onProgress?.({ type: 'remove-piece:complete', data: { txHash, confirmed: isConfirmed } }) + return txHash +} + +function isWaitForConfirmationOptions( + options: RemovePieceOptions +): options is RemovePieceOptionsWithWaitForConfirmation { + return options.waitForConfirmation === 
true && options.synapse != null } diff --git a/src/core/utils/types.ts b/src/core/utils/types.ts index 7437f97..86a3119 100644 --- a/src/core/utils/types.ts +++ b/src/core/utils/types.ts @@ -5,3 +5,12 @@ export type ProgressEvent = D extends : { type: T; data: D } export type ProgressEventHandler = (event: E) => void + +export interface Warning { + /** Machine-readable warning code (e.g., 'METADATA_FETCH_FAILED') */ + code: string + /** Human-readable warning message */ + message: string + /** Additional context data (e.g., { pieceId: 123, dataSetId: 456 }) */ + context?: Record<string, unknown> +} diff --git a/src/data-set/display.ts b/src/data-set/display.ts index adbad58..fe03e39 100644 --- a/src/data-set/display.ts +++ b/src/data-set/display.ts @@ -1,6 +1,7 @@ import { METADATA_KEYS } from '@filoz/synapse-sdk' import pc from 'picocolors' import type { DataSetSummary, PieceInfo } from '../core/data-set/types.js' +import { PieceStatus } from '../core/data-set/types.js' import { formatFileSize } from '../utils/cli-helpers.js' import { log } from '../utils/cli-logger.js' @@ -173,12 +174,28 @@ function renderMetadata(metadata: Record, indentLevel: number = function renderPiece(piece: PieceInfo, baseIndentLevel: number = 2): void { const sizeDisplay = piece.size != null ? 
formatFileSize(piece.size) : pc.gray('unknown') - log.indent(pc.bold(`#${piece.pieceId}`), baseIndentLevel) + let pieceStatusDisplay: string + switch (piece.status) { + case PieceStatus.ACTIVE: + pieceStatusDisplay = pc.green('active') + break + case PieceStatus.PENDING_REMOVAL: + pieceStatusDisplay = pc.yellow('pending removal') + break + case PieceStatus.ONCHAIN_ORPHANED: + pieceStatusDisplay = pc.red('onchain orphaned') + break + case PieceStatus.OFFCHAIN_ORPHANED: + pieceStatusDisplay = pc.red('offchain orphaned') + break + default: + pieceStatusDisplay = pc.gray('unknown') + break + } + log.indent(pc.bold(`#${piece.pieceId} (${pieceStatusDisplay})`), baseIndentLevel) log.indent(`PieceCID: ${piece.pieceCid}`, baseIndentLevel + 1) log.indent(`Size: ${sizeDisplay}`, baseIndentLevel + 1) - const extraMetadataEntries = Object.entries(piece.metadata ?? {}) - renderMetadata(Object.fromEntries(extraMetadataEntries), baseIndentLevel + 1) } diff --git a/src/index-types.ts b/src/index-types.ts index 902fdcc..2f27f27 100644 --- a/src/index-types.ts +++ b/src/index-types.ts @@ -25,10 +25,10 @@ export type { ProviderInfo } from '@filoz/synapse-sdk' export type { DataSetPiecesResult, DataSetSummary, - DataSetWarning, GetDataSetPiecesOptions, ListDataSetsOptions, PieceInfo, + PieceStatus, } from './core/data-set/index.js' export type { PaymentCapacityCheck, PaymentStatus, SetMaxAllowancesResult } from './core/payments/index.js' export type { ServiceApprovalStatus, StorageAllowances } from './core/payments/types.js' @@ -52,7 +52,7 @@ export type { UploadReadinessProgressEvents, UploadReadinessResult, } from './core/upload/index.js' -export type { AnyProgressEvent, ProgressEvent, ProgressEventHandler } from './core/utils/types.js' +export type { AnyProgressEvent, ProgressEvent, ProgressEventHandler, Warning } from './core/utils/types.js' export type { ValidateIPNIProgressEvents, WaitForIpniProviderResultsOptions, diff --git a/src/rm/index.ts b/src/rm/index.ts new file mode 
100644 index 0000000..0c6b7e4 --- /dev/null +++ b/src/rm/index.ts @@ -0,0 +1 @@ +export * from './remove-piece.js' diff --git a/src/rm/remove-piece.ts b/src/rm/remove-piece.ts new file mode 100644 index 0000000..4a6a94a --- /dev/null +++ b/src/rm/remove-piece.ts @@ -0,0 +1,150 @@ +/** + * CLI entrypoint for removing a piece from a Data Set. + * + * Responsibilities: + * - Validate required CLI arguments (piece CID, dataSet) + * - Initialize Synapse with CLI auth/env configuration + * - Wire up progress events to spinner output + * - Return transaction hash and confirmation status (or throw on failure) + */ +import pc from 'picocolors' +import pino from 'pino' +import { TELEMETRY_CLI_APP_NAME } from '../common/constants.js' +import { type RemovePieceProgressEvents, removePiece } from '../core/piece/index.js' +import { cleanupSynapseService, createStorageContext, initializeSynapse } from '../core/synapse/index.js' +import { parseCLIAuth } from '../utils/cli-auth.js' +import { cancel, createSpinner, intro, outro } from '../utils/cli-helpers.js' +import { log } from '../utils/cli-logger.js' +import type { RmPieceOptions, RmPieceResult } from './types.js' + +/** + * Run the remove piece process. 
+ * + * @param options - CLI options including piece CID and dataSet id + * @returns Transaction hash, confirmation status, and identifiers used + * + * Behavior: + * - Requires both `piece` and `dataSet`; throws if missing/invalid + * - Uses CLI auth env/flags via parseCLIAuth + * - Streams progress to spinner and exits with cancel on failure + * - Always calls cleanupSynapseService to close providers + */ +export async function runRmPiece(options: RmPieceOptions): Promise { + intro(pc.bold('Filecoin Pin Remove')) + + const spinner = createSpinner() + + // Initialize logger (silent for CLI output) + const logger = pino({ + level: process.env.LOG_LEVEL || 'silent', + }) + + const { piece: pieceCid, dataSet } = options + + // Validate inputs + if (!pieceCid || !dataSet) { + spinner.stop(`${pc.red('✗')} Piece CID and DataSet ID are required`) + cancel('Remove cancelled') + throw new Error('Piece CID and DataSet ID are required') + } + + const dataSetId = Number(dataSet) + if (!Number.isInteger(dataSetId) || dataSetId <= 0) { + spinner.stop(`${pc.red('✗')} DataSet ID must be a positive integer`) + cancel('Remove cancelled') + throw new Error('DataSet ID must be a positive integer') + } + + try { + spinner.start('Initializing Synapse SDK...') + + const authConfig = parseCLIAuth(options) + const synapse = await initializeSynapse( + { ...authConfig, telemetry: { sentrySetTags: { appName: TELEMETRY_CLI_APP_NAME } } }, + logger + ) + const network = synapse.getNetwork() + + spinner.stop(`${pc.green('✓')} Connected to ${pc.bold(network)}`) + + log.spinnerSection('Remove Configuration', [ + pc.gray(`Piece CID: ${pieceCid}`), + pc.gray(`Data Set ID: ${dataSetId}`), + ]) + + // Track transaction details + let txHash = '' + let isConfirmed = false + + // Remove piece with progress tracking + const onProgress = (event: RemovePieceProgressEvents): void => { + switch (event.type) { + case 'remove-piece:submitting': + spinner.start('Submitting remove transaction...') + break + + 
case 'remove-piece:submitted': + spinner.message(`Transaction submitted: ${event.data.txHash}`) + txHash = event.data.txHash + break + + case 'remove-piece:confirming': + spinner.message('Waiting for transaction confirmation...') + break + + case 'remove-piece:confirmation-failed': + spinner.message(`${pc.yellow('⚠')} Confirmation wait timed out: ${event.data.message}`) + break + + case 'remove-piece:complete': + isConfirmed = event.data.confirmed + txHash = event.data.txHash + spinner.stop(`${pc.green('✓')} Piece removed${isConfirmed ? ' and confirmed' : ' (confirmation pending)'}`) + break + } + } + + spinner.start('Creating storage context...') + const { storage } = await createStorageContext(synapse, { logger, dataset: { useExisting: dataSetId } }) + + spinner.stop(`${pc.green('✓')} Storage context created`) + + spinner.start('Removing piece...') + txHash = await removePiece(pieceCid, storage, { + synapse, + logger, + onProgress, + waitForConfirmation: options.waitForConfirmation ?? false, + }) + + // Display results + log.spinnerSection('Results', [ + pc.gray(`Transaction Hash: ${txHash}`), + pc.gray(`Status: ${isConfirmed ? 'Confirmed' : 'Pending confirmation'}`), + pc.gray(`Network: ${network}`), + ]) + + const result: RmPieceResult = { + pieceCid, + dataSetId, + transactionHash: txHash, + confirmed: isConfirmed, + } + + // Clean up WebSocket providers to allow process termination + await cleanupSynapseService() + + outro('Remove completed successfully') + + return result + } catch (error) { + spinner.stop(`${pc.red('✗')} Remove failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`) + logger.error({ event: 'rm.failed', error }, 'Remove failed') + + cancel('Remove failed') + throw error + } finally { + // Always cleanup WebSocket providers + await cleanupSynapseService() + } +} diff --git a/src/rm/types.ts b/src/rm/types.ts new file mode 100644 index 0000000..a910136 --- /dev/null +++ b/src/rm/types.ts @@ -0,0 +1,14 @@ +import type { CLIAuthOptions } from '../utils/cli-auth.js' + +export interface RmPieceOptions extends CLIAuthOptions { + piece: string + dataSet: string + waitForConfirmation?: boolean +} + +export interface RmPieceResult { + pieceCid: string + dataSetId: number + transactionHash: string + confirmed: boolean +} diff --git a/src/test/mocks/synapse-sdk.ts b/src/test/mocks/synapse-sdk.ts index 1618781..66bd731 100644 --- a/src/test/mocks/synapse-sdk.ts +++ b/src/test/mocks/synapse-sdk.ts @@ -93,6 +93,30 @@ export const METADATA_KEYS = { IPFS_ROOT_CID: 'ipfsRootCid', } +/** + * Mock PDPVerifier for testing scheduled removals + */ +export class PDPVerifier { + async getScheduledRemovals(_dataSetId: number): Promise { + return [] + } +} + +/** + * Mock PDPServer for testing piece data from provider + */ +export class PDPServer { + async getDataSet(_dataSetId: number): Promise<{ pieces: any[] }> { + return { pieces: [] } + } +} + +// Mock DataSetPieceData type +export type DataSetPieceData = { + pieceId: number + pieceCid: string +} + // Export mock permission type hashes (keccak256 hashes of EIP-712 type strings) // These match the actual values from the SDK export const CREATE_DATA_SET_TYPEHASH = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef' diff --git a/src/test/unit/core-data-set.test.ts b/src/test/unit/core-data-set.test.ts index 881f354..0085b64 100644 --- a/src/test/unit/core-data-set.test.ts +++ b/src/test/unit/core-data-set.test.ts @@ -17,6 +17,7 @@ const { mockGetProviders, mockGetPieces, mockGetAddress, + mockPDPServerGetDataSet, state, } = vi.hoisted(() => { const 
state = { @@ -24,6 +25,7 @@ const { providers: [] as any[], pieces: [] as Array<{ pieceId: number; pieceCid: { toString: () => string } }>, pieceMetadata: {} as Record>, + pdpServerPieces: [] as Array<{ pieceId: number; pieceCid: string }>, } const mockGetAddress = vi.fn(async () => '0xtest-address') @@ -37,12 +39,25 @@ const { } }) const mockGetNetwork = vi.fn(() => ({ chainId: 314159n, name: 'calibration' })) + const mockGetProviderInfo = vi.fn(async () => ({ + products: { + PDP: { + data: { + serviceURL: 'http://localhost:8888/pdp', + }, + }, + }, + })) + const mockPDPServerGetDataSet = vi.fn(async (_dataSetId: number) => ({ + pieces: state.pdpServerPieces, + })) const mockWarmStorageInstance = { getPieceMetadata: vi.fn(async (_dataSetId: number, pieceId: number) => { return state.pieceMetadata[pieceId] ?? {} }), getServiceProviderRegistryAddress: vi.fn(async () => '0xsp-registry'), + getPDPVerifierAddress: vi.fn(() => '0xpdp-verifier'), } const mockWarmStorageCreate = vi.fn(async () => mockWarmStorageInstance) @@ -51,6 +66,9 @@ const { dataSetId: 123, synapse: null as any, // will be set in tests getPieces: mockGetPieces, + provider: { + serviceProvider: '0xservice-provider', + }, } const mockSynapse = { @@ -58,6 +76,7 @@ const { getProvider: () => ({}), getNetwork: mockGetNetwork, getWarmStorageAddress: () => '0xwarm-storage', + getProviderInfo: mockGetProviderInfo, storage: { findDataSets: mockFindDataSets, }, @@ -72,6 +91,8 @@ const { mockGetProviders, mockGetPieces, mockGetAddress, + mockGetProviderInfo, + mockPDPServerGetDataSet, state, } }) @@ -81,6 +102,11 @@ vi.mock('@filoz/synapse-sdk', async () => { return { ...sharedMock, WarmStorageService: { create: mockWarmStorageCreate }, + PDPServer: class { + async getDataSet(dataSetId: number) { + return mockPDPServerGetDataSet(dataSetId) + } + }, } }) @@ -94,6 +120,7 @@ vi.mock('@filoz/synapse-core/piece', () => ({ if (cidString === 'bafkpiece2') return 4194304 // 4 MiB throw new Error(`Invalid piece CID: 
${cidString}`) }), + MAX_UPLOAD_SIZE: 32 * 1024 * 1024 * 1024, // 32 GiB })) vi.mock('@filoz/synapse-sdk/sp-registry', () => { return { @@ -311,6 +338,7 @@ describe('getDataSetPieces', () => { vi.clearAllMocks() state.pieces = [] state.pieceMetadata = {} + state.pdpServerPieces = [] mockStorageContext.synapse = mockSynapse }) @@ -327,6 +355,10 @@ describe('getDataSetPieces', () => { { pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }, { pieceId: 1, pieceCid: { toString: () => 'bafkpiece1' } }, ] + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'bafkpiece1' }, + ] const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any, { includeMetadata: false, @@ -349,6 +381,10 @@ describe('getDataSetPieces', () => { { pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }, { pieceId: 1, pieceCid: { toString: () => 'bafkpiece1' } }, ] + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'bafkpiece1' }, + ] state.pieceMetadata = { 0: { [METADATA_KEYS.IPFS_ROOT_CID]: 'bafyroot0', @@ -382,6 +418,11 @@ describe('getDataSetPieces', () => { { pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }, { pieceId: 1, pieceCid: { toString: () => 'bafkpiece1' } }, ] + // Set pdpServerPieces to match onchain pieces (no orphaned warnings) + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'bafkpiece1' }, + ] state.pieceMetadata = { 0: { [METADATA_KEYS.IPFS_ROOT_CID]: 'bafyroot0', @@ -415,7 +456,11 @@ describe('getDataSetPieces', () => { it('adds warning when WarmStorage initialization fails', async () => { state.pieces = [{ pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }] - mockWarmStorageCreate.mockRejectedValueOnce(new Error('WarmStorage unavailable')) + // Don't set pdpServerPieces - when WarmStorage fails, PDPServer.getDataSet() is never called + // Both WarmStorage calls should fail (first for scheduled 
removals, second for metadata) + mockWarmStorageCreate + .mockRejectedValueOnce(new Error('WarmStorage unavailable')) + .mockRejectedValueOnce(new Error('WarmStorage unavailable')) const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any, { includeMetadata: true, @@ -423,10 +468,20 @@ describe('getDataSetPieces', () => { expect(result.pieces).toHaveLength(1) expect(result.pieces[0]?.metadata).toBeUndefined() - expect(result.warnings).toHaveLength(1) - expect(result.warnings?.[0]).toMatchObject({ + // Expect 2 warnings: SCHEDULED_REMOVALS_UNAVAILABLE and WARM_STORAGE_INIT_FAILED + expect(result.warnings).toHaveLength(2) + expect(result.warnings).toContainEqual({ + code: 'SCHEDULED_REMOVALS_UNAVAILABLE', + message: 'Failed to get scheduled removals', + context: { + dataSetId: 123, + error: 'Error: WarmStorage unavailable', + }, + }) + expect(result.warnings).toContainEqual({ code: 'WARM_STORAGE_INIT_FAILED', message: 'Failed to initialize WarmStorageService for metadata enrichment', + context: { error: 'Error: WarmStorage unavailable' }, }) }) @@ -443,6 +498,7 @@ describe('getDataSetPieces', () => { it('handles pieces without root IPFS CID in metadata', async () => { state.pieces = [{ pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }] + state.pdpServerPieces = [{ pieceId: 0, pieceCid: 'bafkpiece0' }] state.pieceMetadata = { 0: { label: 'no-cid-file.txt', @@ -466,6 +522,10 @@ describe('getDataSetPieces', () => { { pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }, { pieceId: 1, pieceCid: { toString: () => 'bafkpiece1' } }, ] + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'bafkpiece1' }, + ] const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any) @@ -480,6 +540,11 @@ describe('getDataSetPieces', () => { { pieceId: 1, pieceCid: { toString: () => 'bafkpiece1' } }, // 2 MiB { pieceId: 2, pieceCid: { toString: () => 'bafkpiece2' } }, // 4 MiB ] + 
state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'bafkpiece1' }, + { pieceId: 2, pieceCid: 'bafkpiece2' }, + ] const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any) @@ -502,6 +567,11 @@ describe('getDataSetPieces', () => { { pieceId: 1, pieceCid: { toString: () => 'invalid-cid' } }, // Will throw { pieceId: 2, pieceCid: { toString: () => 'bafkpiece2' } }, // Valid ] + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'invalid-cid' }, + { pieceId: 2, pieceCid: 'bafkpiece2' }, + ] const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any) @@ -512,4 +582,83 @@ describe('getDataSetPieces', () => { // Total should only include pieces with valid sizes expect(result.totalSizeBytes).toBe(BigInt(1048576 + 4194304)) }) + + it('adds ONCHAIN_ORPHANED warning when piece is on-chain but not reported by provider', async () => { + state.pieces = [ + { pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }, + { pieceId: 1, pieceCid: { toString: () => 'bafkpiece1' } }, + ] + // PDPServer only reports piece 0, so piece 1 will be flagged as ONCHAIN_ORPHANED + state.pdpServerPieces = [{ pieceId: 0, pieceCid: 'bafkpiece0' }] + + const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any) + + expect(result.pieces).toHaveLength(2) + expect(result.warnings).toHaveLength(1) + expect(result.warnings?.[0]).toMatchObject({ + code: 'ONCHAIN_ORPHANED', + message: 'Piece is on-chain but the provider does not report it', + context: { + pieceId: 1, + pieceCid: { toString: expect.any(Function) }, + }, + }) + }) + + it('adds OFFCHAIN_ORPHANED warning when piece is reported by provider but not on-chain', async () => { + state.pieces = [{ pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }] + // PDPServer reports 2 pieces, but only piece 0 is on-chain + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { 
pieceId: 1, pieceCid: 'bafkpiece1' }, + ] + + const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any) + + // Should have 2 pieces: 1 from on-chain and 1 from provider + expect(result.pieces).toHaveLength(2) + expect(result.warnings).toHaveLength(1) + expect(result.warnings?.[0]).toMatchObject({ + code: 'OFFCHAIN_ORPHANED', + message: 'Piece is reported by provider but not on-chain', + context: { + pieceId: 1, + pieceCid: 'bafkpiece1', + }, + }) + }) + + it('handles both ONCHAIN_ORPHANED and OFFCHAIN_ORPHANED warnings in same result', async () => { + state.pieces = [ + { pieceId: 0, pieceCid: { toString: () => 'bafkpiece0' } }, + { pieceId: 2, pieceCid: { toString: () => 'bafkpiece2' } }, + ] + // PDPServer reports pieces 0 and 1, but on-chain has pieces 0 and 2 + state.pdpServerPieces = [ + { pieceId: 0, pieceCid: 'bafkpiece0' }, + { pieceId: 1, pieceCid: 'bafkpiece1' }, + ] + + const result = await getDataSetPieces(mockSynapse as any, mockStorageContext as any) + + // Should have 3 pieces total: 0 (active), 1 (offchain orphaned), 2 (onchain orphaned) + expect(result.pieces).toHaveLength(3) + expect(result.warnings).toHaveLength(2) + expect(result.warnings).toContainEqual({ + code: 'ONCHAIN_ORPHANED', + message: 'Piece is on-chain but the provider does not report it', + context: { + pieceId: 2, + pieceCid: { toString: expect.any(Function) }, + }, + }) + expect(result.warnings).toContainEqual({ + code: 'OFFCHAIN_ORPHANED', + message: 'Piece is reported by provider but not on-chain', + context: { + pieceId: 1, + pieceCid: 'bafkpiece1', + }, + }) + }) }) diff --git a/src/test/unit/remove-piece.test.ts b/src/test/unit/remove-piece.test.ts new file mode 100644 index 0000000..f7a596b --- /dev/null +++ b/src/test/unit/remove-piece.test.ts @@ -0,0 +1,130 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { removePiece } from '../../core/piece/index.js' + +const { mockDeletePiece, mockWaitForTransaction, mockSynapse, 
storageContext, state } = vi.hoisted(() => { + const state = { + txHash: '0xtest-hash', + dataSetId: 99, + } + + const mockDeletePiece = vi.fn(async () => state.txHash) + const storageContext = { + dataSetId: state.dataSetId, + deletePiece: mockDeletePiece, + } + + const mockWaitForTransaction = vi.fn(async () => undefined) + const mockSynapse = { + getProvider: () => ({ + waitForTransaction: mockWaitForTransaction, + }), + } + + return { mockDeletePiece, mockWaitForTransaction, mockSynapse, storageContext, state } +}) + +describe('removePiece', () => { + beforeEach(() => { + vi.clearAllMocks() + state.txHash = '0xtest-hash' + state.dataSetId = 99 + storageContext.dataSetId = state.dataSetId + }) + + it('removes a piece without waiting for confirmation', async () => { + const result = await removePiece('bafkzcibpiece', storageContext as any, {}) + + expect(result).toBe(state.txHash) + expect(mockDeletePiece).toHaveBeenCalledWith('bafkzcibpiece') + expect(mockWaitForTransaction).not.toHaveBeenCalled() + }) + + it('throws when storage context is not bound to a data set', async () => { + const unboundStorage = { dataSetId: null, deletePiece: vi.fn() } + + await expect(removePiece('bafkzcibpiece', unboundStorage as any, {})).rejects.toThrow( + /Storage context must be bound to a Data Set/ + ) + }) + + it('emits progress events and waits for confirmation when requested', async () => { + const onProgress = vi.fn() + mockWaitForTransaction.mockResolvedValueOnce(undefined) + + const result = await removePiece('bafkzcibpiece', storageContext as any, { + synapse: mockSynapse as any, + waitForConfirmation: true, + onProgress, + }) + + expect(result).toBe(state.txHash) + expect(mockWaitForTransaction).toHaveBeenCalledWith(state.txHash, 1, 120000) + expect(onProgress).toHaveBeenNthCalledWith(1, { + type: 'remove-piece:submitting', + data: { pieceCid: 'bafkzcibpiece', dataSetId: 99 }, + }) + expect(onProgress).toHaveBeenNthCalledWith(2, { + type: 'remove-piece:submitted', + 
data: { pieceCid: 'bafkzcibpiece', dataSetId: 99, txHash: state.txHash }, + }) + expect(onProgress).toHaveBeenNthCalledWith(3, { + type: 'remove-piece:confirming', + data: { pieceCid: 'bafkzcibpiece', dataSetId: 99, txHash: state.txHash }, + }) + expect(onProgress).toHaveBeenNthCalledWith(4, { + type: 'remove-piece:complete', + data: { txHash: state.txHash, confirmed: true }, + }) + }) + + it('emits confirmation-failed and still completes when confirmation times out', async () => { + const onProgress = vi.fn() + mockWaitForTransaction.mockRejectedValueOnce(new Error('timeout')) + + const result = await removePiece('bafkzcibpiece', storageContext as any, { + synapse: mockSynapse as any, + waitForConfirmation: true, + onProgress, + }) + + expect(result).toBe(state.txHash) + expect(mockWaitForTransaction).toHaveBeenCalledWith(state.txHash, 1, 120000) + expect(onProgress).toHaveBeenCalledWith({ + type: 'remove-piece:confirmation-failed', + data: { pieceCid: 'bafkzcibpiece', dataSetId: 99, txHash: state.txHash, message: 'timeout' }, + }) + expect(onProgress).toHaveBeenLastCalledWith({ + type: 'remove-piece:complete', + data: { txHash: state.txHash, confirmed: false }, + }) + }) + + it('skips confirmation when waitForConfirmation is false', async () => { + const onProgress = vi.fn() + + const result = await removePiece('bafkzcibpiece', storageContext as any, { onProgress }) + + expect(result).toBe(state.txHash) + expect(mockWaitForTransaction).not.toHaveBeenCalled() + expect(onProgress).toHaveBeenNthCalledWith(1, { + type: 'remove-piece:submitting', + data: { pieceCid: 'bafkzcibpiece', dataSetId: 99 }, + }) + expect(onProgress).toHaveBeenNthCalledWith(2, { + type: 'remove-piece:submitted', + data: { pieceCid: 'bafkzcibpiece', dataSetId: 99, txHash: state.txHash }, + }) + expect(onProgress).toHaveBeenNthCalledWith(3, { + type: 'remove-piece:complete', + data: { txHash: state.txHash, confirmed: false }, + }) + }) + + it('throws when waiting for confirmation without a 
synapse instance', async () => { + await expect( + removePiece('bafkzcibpiece', storageContext as any, { + waitForConfirmation: true, + }) + ).rejects.toThrow('A Synapse instance is required when waitForConfirmation is true') + }) +}) diff --git a/src/test/unit/rm-cmd.test.ts b/src/test/unit/rm-cmd.test.ts new file mode 100644 index 0000000..b5b0d17 --- /dev/null +++ b/src/test/unit/rm-cmd.test.ts @@ -0,0 +1,156 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { runRmPiece } from '../../rm/remove-piece.js' +import type { RmPieceOptions } from '../../rm/types.js' + +const { + spinner, + mockIntro, + mockOutro, + mockCancel, + mockCreateSpinner, + mockParseCLIAuth, + mockInitializeSynapse, + mockCreateStorageContext, + mockCleanupSynapseService, + mockRemovePiece, + mockLogSection, +} = vi.hoisted(() => { + const spinner = { + start: vi.fn(), + stop: vi.fn(), + message: vi.fn(), + } + + const mockIntro = vi.fn() + const mockOutro = vi.fn() + const mockCancel = vi.fn() + const mockCreateSpinner = vi.fn(() => spinner) + const mockParseCLIAuth = vi.fn(() => ({ privateKey: '0xabc', rpcUrl: 'wss://rpc' })) + const mockCleanupSynapseService = vi.fn(async () => { + // no-op for tests + }) + const mockLogSection = vi.fn() + + const mockInitializeSynapse = vi.fn(async () => ({ + getNetwork: () => 'calibration', + })) + + const mockStorageContext = { dataSetId: 123 } + const mockCreateStorageContext = vi.fn(async () => ({ storage: mockStorageContext })) + + const mockRemovePiece = vi.fn(async (_pieceCid: string, _storage: any, opts: { onProgress?: any }) => { + opts.onProgress?.({ + type: 'remove-piece:submitted', + data: { pieceCid: _pieceCid, dataSetId: mockStorageContext.dataSetId, txHash: '0xtx' }, + }) + opts.onProgress?.({ + type: 'remove-piece:complete', + data: { txHash: '0xtx', confirmed: true }, + }) + return '0xtx' + }) + + return { + spinner, + mockIntro, + mockOutro, + mockCancel, + mockCreateSpinner, + mockParseCLIAuth, + 
mockInitializeSynapse, + mockCreateStorageContext, + mockCleanupSynapseService, + mockRemovePiece, + mockLogSection, + } +}) + +vi.mock('../../utils/cli-helpers.js', () => ({ + intro: mockIntro, + outro: mockOutro, + cancel: mockCancel, + createSpinner: mockCreateSpinner, +})) + +vi.mock('../../utils/cli-auth.js', () => ({ + parseCLIAuth: mockParseCLIAuth, +})) + +vi.mock('../../utils/cli-logger.js', () => ({ + log: { spinnerSection: mockLogSection }, +})) + +vi.mock('../../core/synapse/index.js', () => ({ + initializeSynapse: mockInitializeSynapse, + createStorageContext: mockCreateStorageContext, + cleanupSynapseService: mockCleanupSynapseService, +})) + +vi.mock('../../core/piece/index.js', () => ({ + removePiece: mockRemovePiece, +})) + +describe('runRmPiece', () => { + const baseOptions: RmPieceOptions = { + piece: 'bafkzcibpiece', + dataSet: '123', + } + + beforeEach(() => { + vi.clearAllMocks() + }) + + it('removes a piece and returns result with confirmation status', async () => { + const result = await runRmPiece({ ...baseOptions, waitForConfirmation: true }) + + expect(result).toEqual({ + pieceCid: 'bafkzcibpiece', + dataSetId: 123, + transactionHash: '0xtx', + confirmed: true, + }) + + expect(mockInitializeSynapse).toHaveBeenCalledWith( + expect.objectContaining({ privateKey: '0xabc', rpcUrl: 'wss://rpc' }), + expect.anything() + ) + expect(mockRemovePiece).toHaveBeenCalledWith( + 'bafkzcibpiece', + expect.objectContaining({ dataSetId: 123 }), + expect.objectContaining({ + waitForConfirmation: true, + onProgress: expect.any(Function), + }) + ) + expect(mockCreateStorageContext).toHaveBeenCalledWith(expect.anything(), { + logger: expect.anything(), + dataset: { useExisting: 123 }, + }) + expect(spinner.stop).toHaveBeenCalledWith(expect.stringContaining('Piece removed')) + expect(mockCleanupSynapseService).toHaveBeenCalled() + expect(mockIntro).toHaveBeenCalled() + expect(mockOutro).toHaveBeenCalledWith('Remove completed successfully') + }) + + it('throws 
when piece CID or data set is missing', async () => { + await expect( + runRmPiece({ + ...baseOptions, + piece: '', + dataSet: '', + } as RmPieceOptions) + ).rejects.toThrow('Piece CID and DataSet ID are required') + + expect(mockRemovePiece).not.toHaveBeenCalled() + expect(mockCancel).toHaveBeenCalledWith('Remove cancelled') + expect(spinner.stop).toHaveBeenCalled() + }) + + it('throws on invalid data set id', async () => { + await expect(runRmPiece({ ...baseOptions, dataSet: '-5' })).rejects.toThrow('DataSet ID must be a positive integer') + + expect(mockRemovePiece).not.toHaveBeenCalled() + expect(mockCancel).toHaveBeenCalledWith('Remove cancelled') + expect(spinner.stop).toHaveBeenCalled() + }) +}) diff --git a/upload-action/package-lock.json b/upload-action/package-lock.json index 62b654e..9ea1db6 100644 --- a/upload-action/package-lock.json +++ b/upload-action/package-lock.json @@ -25,8 +25,8 @@ "license": "Apache-2.0 OR MIT", "dependencies": { "@clack/prompts": "^0.11.0", - "@filoz/synapse-core": "^0.1.3", - "@filoz/synapse-sdk": "^0.36.0", + "@filoz/synapse-core": "^0.1.4", + "@filoz/synapse-sdk": "^0.36.1", "@helia/unixfs": "^6.0.1", "@ipld/car": "^5.4.2", "commander": "^14.0.1",