From 3beabcc25ba922942548d1d656d9a68e90077575 Mon Sep 17 00:00:00 2001 From: Adam Jones Date: Mon, 17 Nov 2025 15:02:02 +0000 Subject: [PATCH 1/3] fix(filesystem): convert to modern TypeScript SDK APIs Convert the filesystem server to use the modern McpServer API instead of the low-level Server API. Key changes: - Replace Server with McpServer from @modelcontextprotocol/sdk/server/mcp.js - Convert all 13 tools to use registerTool() instead of manual request handlers - Use Zod schemas directly in inputSchema/outputSchema - Add structuredContent to all tool responses - Fix type literals to use 'as const' assertions - Update roots protocol handling to use server.server.* pattern - Fix tsconfig to exclude vitest.config.ts Tools converted: - read_file (deprecated) - read_text_file - read_media_file - read_multiple_files - write_file - edit_file - create_directory - list_directory - list_directory_with_sizes - directory_tree - move_file - search_files - get_file_info - list_allowed_directories The modern API provides: - Less boilerplate code - Better type safety with Zod - More declarative tool registration - Cleaner, more maintainable code --- src/filesystem/index.ts | 979 +++++++++++++++++++---------------- src/filesystem/tsconfig.json | 3 +- 2 files changed, 535 insertions(+), 447 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index 7888196285..e8ddc233f8 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -1,11 +1,8 @@ #!/usr/bin/env node -import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; import { - CallToolRequestSchema, - ListToolsRequestSchema, - ToolSchema, RootsListChangedNotificationSchema, type Root, } from "@modelcontextprotocol/sdk/types.js"; @@ -13,7 +10,6 @@ import fs from "fs/promises"; import { createReadStream } from "fs"; import path from "path"; import { z } from "zod"; -import { zodToJsonSchema } from "zod-to-json-schema"; import { minimatch } from "minimatch"; import { normalizePath, expandHome } from './path-utils.js'; import { getValidRootDirectories } from './roots-utils.js'; @@ -143,20 +139,12 @@ const GetFileInfoArgsSchema = z.object({ path: z.string(), }); -const ToolInputSchema = ToolSchema.shape.inputSchema; -type ToolInput = z.infer; - // Server setup -const server = new Server( +const server = new McpServer( { name: "secure-filesystem-server", version: "0.2.0", - }, - { - capabilities: { - tools: {}, - }, - }, + } ); // Reads a file as a stream of buffers, concatenates them, and then encodes @@ -177,468 +165,567 @@ async function readFileAsBase64Stream(filePath: string): Promise { }); } -// Tool handlers -server.setRequestHandler(ListToolsRequestSchema, async () => { +// Tool registrations + +// read_file (deprecated) and read_text_file +const readTextFileHandler = async (args: z.infer) => { + const validPath = await validatePath(args.path); + + if (args.head && args.tail) { + throw new Error("Cannot specify both head and tail parameters simultaneously"); + } + + if (args.tail) { + const tailContent = await tailFile(validPath, args.tail); + return { + content: [{ type: "text" as const, text: tailContent }], + }; + } + + if (args.head) { + const headContent = await headFile(validPath, args.head); + return { + content: [{ type: "text" as const, text: headContent }], + }; + } + const content = await readFileContent(validPath); return { - tools: [ - { - 
name: "read_file", - description: "Read the complete contents of a file as text. DEPRECATED: Use read_text_file instead.", - inputSchema: zodToJsonSchema(ReadTextFileArgsSchema) as ToolInput, - }, - { - name: "read_text_file", - description: - "Read the complete contents of a file from the file system as text. " + - "Handles various text encodings and provides detailed error messages " + - "if the file cannot be read. Use this tool when you need to examine " + - "the contents of a single file. Use the 'head' parameter to read only " + - "the first N lines of a file, or the 'tail' parameter to read only " + - "the last N lines of a file. Operates on the file as text regardless of extension. " + - "Only works within allowed directories.", - inputSchema: zodToJsonSchema(ReadTextFileArgsSchema) as ToolInput, - }, - { - name: "read_media_file", - description: - "Read an image or audio file. Returns the base64 encoded data and MIME type. " + - "Only works within allowed directories.", - inputSchema: zodToJsonSchema(ReadMediaFileArgsSchema) as ToolInput, - }, - { - name: "read_multiple_files", - description: - "Read the contents of multiple files simultaneously. This is more " + - "efficient than reading files one by one when you need to analyze " + - "or compare multiple files. Each file's content is returned with its " + - "path as a reference. Failed reads for individual files won't stop " + - "the entire operation. Only works within allowed directories.", - inputSchema: zodToJsonSchema(ReadMultipleFilesArgsSchema) as ToolInput, - }, - { - name: "write_file", - description: - "Create a new file or completely overwrite an existing file with new content. " + - "Use with caution as it will overwrite existing files without warning. " + - "Handles text content with proper encoding. Only works within allowed directories.", - inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput, - }, - { - name: "edit_file", - description: - "Make line-based edits to a text file. Each edit replaces exact line sequences " + - "with new content. Returns a git-style diff showing the changes made. " + - "Only works within allowed directories.", - inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput, - }, - { - name: "create_directory", - description: - "Create a new directory or ensure a directory exists. Can create multiple " + - "nested directories in one operation. If the directory already exists, " + - "this operation will succeed silently. Perfect for setting up directory " + - "structures for projects or ensuring required paths exist. Only works within allowed directories.", - inputSchema: zodToJsonSchema(CreateDirectoryArgsSchema) as ToolInput, - }, - { - name: "list_directory", - description: - "Get a detailed listing of all files and directories in a specified path. " + - "Results clearly distinguish between files and directories with [FILE] and [DIR] " + - "prefixes. This tool is essential for understanding directory structure and " + - "finding specific files within a directory. Only works within allowed directories.", - inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput, - }, - { - name: "list_directory_with_sizes", - description: - "Get a detailed listing of all files and directories in a specified path, including sizes. " + - "Results clearly distinguish between files and directories with [FILE] and [DIR] " + - "prefixes. This tool is useful for understanding directory structure and " + - "finding specific files within a directory. 
Only works within allowed directories.", - inputSchema: zodToJsonSchema(ListDirectoryWithSizesArgsSchema) as ToolInput, - }, - { - name: "directory_tree", - description: - "Get a recursive tree view of files and directories as a JSON structure. " + - "Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " + - "Files have no children array, while directories always have a children array (which may be empty). " + - "The output is formatted with 2-space indentation for readability. Only works within allowed directories.", - inputSchema: zodToJsonSchema(DirectoryTreeArgsSchema) as ToolInput, - }, - { - name: "move_file", - description: - "Move or rename files and directories. Can move files between directories " + - "and rename them in a single operation. If the destination exists, the " + - "operation will fail. Works across different directories and can be used " + - "for simple renaming within the same directory. Both source and destination must be within allowed directories.", - inputSchema: zodToJsonSchema(MoveFileArgsSchema) as ToolInput, - }, - { - name: "search_files", - description: - "Recursively search for files and directories matching a pattern. " + - "The patterns should be glob-style patterns that match paths relative to the working directory. " + - "Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories. " + - "Returns full paths to all matching items. Great for finding files when you don't know their exact location. " + - "Only searches within allowed directories.", - inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput, - }, - { - name: "get_file_info", - description: - "Retrieve detailed metadata about a file or directory. Returns comprehensive " + - "information including size, creation time, last modified time, permissions, " + - "and type. This tool is perfect for understanding file characteristics " + - "without reading the actual content. Only works within allowed directories.", - inputSchema: zodToJsonSchema(GetFileInfoArgsSchema) as ToolInput, - }, - { - name: "list_allowed_directories", - description: - "Returns the list of directories that this server is allowed to access. " + - "Subdirectories within these allowed directories are also accessible. " + - "Use this to understand which directories and their nested paths are available " + - "before trying to access files.", - inputSchema: { - type: "object", - properties: {}, - required: [], - }, - }, - ], + content: [{ type: "text" as const, text: content }], }; -}); +}; + +server.registerTool( + "read_file", + { + title: "Read File (Deprecated)", + description: "Read the complete contents of a file as text. DEPRECATED: Use read_text_file instead.", + inputSchema: { + path: z.string(), + tail: z.number().optional().describe("If provided, returns only the last N lines of the file"), + head: z.number().optional().describe("If provided, returns only the first N lines of the file") + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + readTextFileHandler +); +server.registerTool( + "read_text_file", + { + title: "Read Text File", + description: + "Read the complete contents of a file from the file system as text. " + + "Handles various text encodings and provides detailed error messages " + + "if the file cannot be read. Use this tool when you need to examine " + + "the contents of a single file. 
Use the 'head' parameter to read only " + + "the first N lines of a file, or the 'tail' parameter to read only " + + "the last N lines of a file. Operates on the file as text regardless of extension. " + + "Only works within allowed directories.", + inputSchema: { + path: z.string(), + tail: z.number().optional().describe("If provided, returns only the last N lines of the file"), + head: z.number().optional().describe("If provided, returns only the first N lines of the file") + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + readTextFileHandler +); -server.setRequestHandler(CallToolRequestSchema, async (request) => { - try { - const { name, arguments: args } = request.params; - - switch (name) { - case "read_file": - case "read_text_file": { - const parsed = ReadTextFileArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for read_text_file: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); +server.registerTool( + "read_media_file", + { + title: "Read Media File", + description: + "Read an image or audio file. Returns the base64 encoded data and MIME type. " + + "Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { + content: z.array(z.object({ + type: z.enum(["image", "audio"]), + data: z.string(), + mimeType: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const extension = path.extname(validPath).toLowerCase(); + const mimeTypes: Record = { + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".webp": "image/webp", + ".bmp": "image/bmp", + ".svg": "image/svg+xml", + ".mp3": "audio/mpeg", + ".wav": "audio/wav", + ".ogg": "audio/ogg", + ".flac": "audio/flac", + }; + const mimeType = mimeTypes[extension] || "application/octet-stream"; + const data = await readFileAsBase64Stream(validPath); + + if (mimeType.startsWith("audio/")) { + return { + content: [{ type: "audio" as const, data, mimeType }], + }; + } else { + // For all other media types including images and unknown types, return as image + // (MCP ImageContent can handle any base64-encoded binary data with appropriate mimeType) + return { + content: [{ type: "image" as const, data, mimeType }], + }; + } + } +); - if (parsed.data.head && parsed.data.tail) { - throw new Error("Cannot specify both head and tail parameters simultaneously"); +server.registerTool( + "read_multiple_files", + { + title: "Read Multiple Files", + description: + "Read the contents of multiple files simultaneously. This is more " + + "efficient than reading files one by one when you need to analyze " + + "or compare multiple files. Each file's content is returned with its " + + "path as a reference. Failed reads for individual files won't stop " + + "the entire operation. Only works within allowed directories.", + inputSchema: { + paths: z.array(z.string()) + .min(1) + .describe("Array of file paths to read. 
Each path must be a string pointing to a valid file within allowed directories.") + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const results = await Promise.all( + args.paths.map(async (filePath: string) => { + try { + const validPath = await validatePath(filePath); + const content = await readFileContent(validPath); + return `${filePath}:\n${content}\n`; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + return `${filePath}: Error - ${errorMessage}`; } + }), + ); + return { + content: [{ type: "text" as const, text: results.join("\n---\n") }], + }; + } +); + +server.registerTool( + "write_file", + { + title: "Write File", + description: + "Create a new file or completely overwrite an existing file with new content. " + + "Use with caution as it will overwrite existing files without warning. " + + "Handles text content with proper encoding. Only works within allowed directories.", + inputSchema: { + path: z.string(), + content: z.string() + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + await writeFileContent(validPath, args.content); + return { + content: [{ type: "text" as const, text: `Successfully wrote to ${args.path}` }], + }; + } +); + +server.registerTool( + "edit_file", + { + title: "Edit File", + description: + "Make line-based edits to a text file. Each edit replaces exact line sequences " + + "with new content. Returns a git-style diff showing the changes made. " + + "Only works within allowed directories.", + inputSchema: { + path: z.string(), + edits: z.array(z.object({ + oldText: z.string().describe("Text to search for - must match exactly"), + newText: z.string().describe("Text to replace with") + })), + dryRun: z.boolean().default(false).describe("Preview changes using git-style diff format") + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const result = await applyFileEdits(validPath, args.edits, args.dryRun); + return { + content: [{ type: "text" as const, text: result }], + }; + } +); - if (parsed.data.tail) { - // Use memory-efficient tail implementation for large files - const tailContent = await tailFile(validPath, parsed.data.tail); +server.registerTool( + "create_directory", + { + title: "Create Directory", + description: + "Create a new directory or ensure a directory exists. Can create multiple " + + "nested directories in one operation. If the directory already exists, " + + "this operation will succeed silently. Perfect for setting up directory " + + "structures for projects or ensuring required paths exist. Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + await fs.mkdir(validPath, { recursive: true }); + return { + content: [{ type: "text" as const, text: `Successfully created directory ${args.path}` }], + }; + } +); + +server.registerTool( + "list_directory", + { + title: "List Directory", + description: + "Get a detailed listing of all files and directories in a specified path. 
" + + "Results clearly distinguish between files and directories with [FILE] and [DIR] " + + "prefixes. This tool is essential for understanding directory structure and " + + "finding specific files within a directory. Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const entries = await fs.readdir(validPath, { withFileTypes: true }); + const formatted = entries + .map((entry) => `${entry.isDirectory() ? "[DIR]" : "[FILE]"} ${entry.name}`) + .join("\n"); + return { + content: [{ type: "text" as const, text: formatted }], + }; + } +); + +server.registerTool( + "list_directory_with_sizes", + { + title: "List Directory with Sizes", + description: + "Get a detailed listing of all files and directories in a specified path, including sizes. " + + "Results clearly distinguish between files and directories with [FILE] and [DIR] " + + "prefixes. This tool is useful for understanding directory structure and " + + "finding specific files within a directory. Only works within allowed directories.", + inputSchema: { + path: z.string(), + sortBy: z.enum(["name", "size"]).optional().default("name").describe("Sort entries by name or size") + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const entries = await fs.readdir(validPath, { withFileTypes: true }); + + // Get detailed information for each entry + const detailedEntries = await Promise.all( + entries.map(async (entry) => { + const entryPath = path.join(validPath, entry.name); + try { + const stats = await fs.stat(entryPath); return { - content: [{ type: "text", text: tailContent }], + name: entry.name, + isDirectory: entry.isDirectory(), + size: stats.size, + mtime: stats.mtime }; - } - - if (parsed.data.head) { - // Use memory-efficient head implementation for large files - const headContent = await headFile(validPath, parsed.data.head); + } catch (error) { return { - content: [{ type: "text", text: headContent }], + name: entry.name, + isDirectory: entry.isDirectory(), + size: 0, + mtime: new Date(0) }; } - const content = await readFileContent(validPath); - return { - content: [{ type: "text", text: content }], - }; - } + }) + ); - case "read_media_file": { - const parsed = ReadMediaFileArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for read_media_file: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - const extension = path.extname(validPath).toLowerCase(); - const mimeTypes: Record = { - ".png": "image/png", - ".jpg": "image/jpeg", - ".jpeg": "image/jpeg", - ".gif": "image/gif", - ".webp": "image/webp", - ".bmp": "image/bmp", - ".svg": "image/svg+xml", - ".mp3": "audio/mpeg", - ".wav": "audio/wav", - ".ogg": "audio/ogg", - ".flac": "audio/flac", - }; - const mimeType = mimeTypes[extension] || "application/octet-stream"; - const data = await readFileAsBase64Stream(validPath); - const type = mimeType.startsWith("image/") - ? "image" - : mimeType.startsWith("audio/") - ? 
"audio" - : "blob"; - return { - content: [{ type, data, mimeType }], - }; - } - - case "read_multiple_files": { - const parsed = ReadMultipleFilesArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`); - } - const results = await Promise.all( - parsed.data.paths.map(async (filePath: string) => { - try { - const validPath = await validatePath(filePath); - const content = await readFileContent(validPath); - return `${filePath}:\n${content}\n`; - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); - return `${filePath}: Error - ${errorMessage}`; - } - }), - ); - return { - content: [{ type: "text", text: results.join("\n---\n") }], - }; + // Sort entries based on sortBy parameter + const sortedEntries = [...detailedEntries].sort((a, b) => { + if (args.sortBy === 'size') { + return b.size - a.size; // Descending by size } + // Default sort by name + return a.name.localeCompare(b.name); + }); - case "write_file": { - const parsed = WriteFileArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for write_file: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - await writeFileContent(validPath, parsed.data.content); - return { - content: [{ type: "text", text: `Successfully wrote to ${parsed.data.path}` }], - }; - } + // Format the output + const formattedEntries = sortedEntries.map(entry => + `${entry.isDirectory ? "[DIR]" : "[FILE]"} ${entry.name.padEnd(30)} ${ + entry.isDirectory ? "" : formatSize(entry.size).padStart(10) + }` + ); - case "edit_file": { - const parsed = EditFileArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for edit_file: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun); - return { - content: [{ type: "text", text: result }], - }; - } + // Add summary + const totalFiles = detailedEntries.filter(e => !e.isDirectory).length; + const totalDirs = detailedEntries.filter(e => e.isDirectory).length; + const totalSize = detailedEntries.reduce((sum, entry) => sum + (entry.isDirectory ? 0 : entry.size), 0); - case "create_directory": { - const parsed = CreateDirectoryArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for create_directory: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - await fs.mkdir(validPath, { recursive: true }); - return { - content: [{ type: "text", text: `Successfully created directory ${parsed.data.path}` }], - }; - } + const summary = [ + "", + `Total: ${totalFiles} files, ${totalDirs} directories`, + `Combined size: ${formatSize(totalSize)}` + ]; - case "list_directory": { - const parsed = ListDirectoryArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for list_directory: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - const entries = await fs.readdir(validPath, { withFileTypes: true }); - const formatted = entries - .map((entry) => `${entry.isDirectory() ? 
"[DIR]" : "[FILE]"} ${entry.name}`) - .join("\n"); - return { - content: [{ type: "text", text: formatted }], - }; - } + return { + content: [{ + type: "text" as const, + text: [...formattedEntries, ...summary].join("\n") + }], + }; + } +); - case "list_directory_with_sizes": { - const parsed = ListDirectoryWithSizesArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for list_directory_with_sizes: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - const entries = await fs.readdir(validPath, { withFileTypes: true }); - - // Get detailed information for each entry - const detailedEntries = await Promise.all( - entries.map(async (entry) => { - const entryPath = path.join(validPath, entry.name); - try { - const stats = await fs.stat(entryPath); - return { - name: entry.name, - isDirectory: entry.isDirectory(), - size: stats.size, - mtime: stats.mtime - }; - } catch (error) { - return { - name: entry.name, - isDirectory: entry.isDirectory(), - size: 0, - mtime: new Date(0) - }; - } - }) - ); - - // Sort entries based on sortBy parameter - const sortedEntries = [...detailedEntries].sort((a, b) => { - if (parsed.data.sortBy === 'size') { - return b.size - a.size; // Descending by size +server.registerTool( + "directory_tree", + { + title: "Directory Tree", + description: + "Get a recursive tree view of files and directories as a JSON structure. " + + "Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " + + "Files have no children array, while directories always have a children array (which may be empty). " + + "The output is formatted with 2-space indentation for readability. Only works within allowed directories.", + inputSchema: { + path: z.string(), + excludePatterns: z.array(z.string()).optional().default([]) + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + interface TreeEntry { + name: string; + type: 'file' | 'directory'; + children?: TreeEntry[]; + } + const rootPath = args.path; + + async function buildTree(currentPath: string, excludePatterns: string[] = []): Promise { + const validPath = await validatePath(currentPath); + const entries = await fs.readdir(validPath, { withFileTypes: true }); + const result: TreeEntry[] = []; + + for (const entry of entries) { + const relativePath = path.relative(rootPath, path.join(currentPath, entry.name)); + const shouldExclude = excludePatterns.some(pattern => { + if (pattern.includes('*')) { + return minimatch(relativePath, pattern, { dot: true }); } - // Default sort by name - return a.name.localeCompare(b.name); + // For files: match exact name or as part of path + // For directories: match as directory path + return minimatch(relativePath, pattern, { dot: true }) || + minimatch(relativePath, `**/${pattern}`, { dot: true }) || + minimatch(relativePath, `**/${pattern}/**`, { dot: true }); }); + if (shouldExclude) + continue; - // Format the output - const formattedEntries = sortedEntries.map(entry => - `${entry.isDirectory ? "[DIR]" : "[FILE]"} ${entry.name.padEnd(30)} ${ - entry.isDirectory ? "" : formatSize(entry.size).padStart(10) - }` - ); - - // Add summary - const totalFiles = detailedEntries.filter(e => !e.isDirectory).length; - const totalDirs = detailedEntries.filter(e => e.isDirectory).length; - const totalSize = detailedEntries.reduce((sum, entry) => sum + (entry.isDirectory ? 
0 : entry.size), 0); - - const summary = [ - "", - `Total: ${totalFiles} files, ${totalDirs} directories`, - `Combined size: ${formatSize(totalSize)}` - ]; - - return { - content: [{ - type: "text", - text: [...formattedEntries, ...summary].join("\n") - }], + const entryData: TreeEntry = { + name: entry.name, + type: entry.isDirectory() ? 'directory' : 'file' }; - } - - case "directory_tree": { - const parsed = DirectoryTreeArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`); - } - interface TreeEntry { - name: string; - type: 'file' | 'directory'; - children?: TreeEntry[]; - } - const rootPath = parsed.data.path; - - async function buildTree(currentPath: string, excludePatterns: string[] = []): Promise { - const validPath = await validatePath(currentPath); - const entries = await fs.readdir(validPath, {withFileTypes: true}); - const result: TreeEntry[] = []; - - for (const entry of entries) { - const relativePath = path.relative(rootPath, path.join(currentPath, entry.name)); - const shouldExclude = excludePatterns.some(pattern => { - if (pattern.includes('*')) { - return minimatch(relativePath, pattern, {dot: true}); - } - // For files: match exact name or as part of path - // For directories: match as directory path - return minimatch(relativePath, pattern, {dot: true}) || - minimatch(relativePath, `**/${pattern}`, {dot: true}) || - minimatch(relativePath, `**/${pattern}/**`, {dot: true}); - }); - if (shouldExclude) - continue; - - const entryData: TreeEntry = { - name: entry.name, - type: entry.isDirectory() ? 'directory' : 'file' - }; - - if (entry.isDirectory()) { - const subPath = path.join(currentPath, entry.name); - entryData.children = await buildTree(subPath, excludePatterns); - } - - result.push(entryData); - } - - return result; + if (entry.isDirectory()) { + const subPath = path.join(currentPath, entry.name); + entryData.children = await buildTree(subPath, excludePatterns); } - const treeData = await buildTree(rootPath, parsed.data.excludePatterns); - return { - content: [{ - type: "text", - text: JSON.stringify(treeData, null, 2) - }], - }; + result.push(entryData); } - case "move_file": { - const parsed = MoveFileArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for move_file: ${parsed.error}`); - } - const validSourcePath = await validatePath(parsed.data.source); - const validDestPath = await validatePath(parsed.data.destination); - await fs.rename(validSourcePath, validDestPath); - return { - content: [{ type: "text", text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }], - }; - } + return result; + } - case "search_files": { - const parsed = SearchFilesArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for search_files: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - const results = await searchFilesWithValidation(validPath, parsed.data.pattern, allowedDirectories, { excludePatterns: parsed.data.excludePatterns }); - return { - content: [{ type: "text", text: results.length > 0 ? 
results.join("\n") : "No matches found" }], - }; - } + const treeData = await buildTree(rootPath, args.excludePatterns); + return { + content: [{ + type: "text" as const, + text: JSON.stringify(treeData, null, 2) + }], + }; + } +); - case "get_file_info": { - const parsed = GetFileInfoArgsSchema.safeParse(args); - if (!parsed.success) { - throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`); - } - const validPath = await validatePath(parsed.data.path); - const info = await getFileStats(validPath); - return { - content: [{ type: "text", text: Object.entries(info) - .map(([key, value]) => `${key}: ${value}`) - .join("\n") }], - }; - } +server.registerTool( + "move_file", + { + title: "Move File", + description: + "Move or rename files and directories. Can move files between directories " + + "and rename them in a single operation. If the destination exists, the " + + "operation will fail. Works across different directories and can be used " + + "for simple renaming within the same directory. Both source and destination must be within allowed directories.", + inputSchema: { + source: z.string(), + destination: z.string() + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validSourcePath = await validatePath(args.source); + const validDestPath = await validatePath(args.destination); + await fs.rename(validSourcePath, validDestPath); + return { + content: [{ type: "text" as const, text: `Successfully moved ${args.source} to ${args.destination}` }], + }; + } +); - case "list_allowed_directories": { - return { - content: [{ - type: "text", - text: `Allowed directories:\n${allowedDirectories.join('\n')}` - }], - }; - } +server.registerTool( + "search_files", + { + title: "Search Files", + description: + "Recursively search for files and directories matching a pattern. " + + "The patterns should be glob-style patterns that match paths relative to the working directory. " + + "Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories. " + + "Returns full paths to all matching items. Great for finding files when you don't know their exact location. " + + "Only searches within allowed directories.", + inputSchema: { + path: z.string(), + pattern: z.string(), + excludePatterns: z.array(z.string()).optional().default([]) + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const results = await searchFilesWithValidation(validPath, args.pattern, allowedDirectories, { excludePatterns: args.excludePatterns }); + return { + content: [{ type: "text" as const, text: results.length > 0 ? results.join("\n") : "No matches found" }], + }; + } +); - default: - throw new Error(`Unknown tool: ${name}`); +server.registerTool( + "get_file_info", + { + title: "Get File Info", + description: + "Retrieve detailed metadata about a file or directory. Returns comprehensive " + + "information including size, creation time, last modified time, permissions, " + + "and type. This tool is perfect for understanding file characteristics " + + "without reading the actual content. 
Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) } - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const info = await getFileStats(validPath); return { - content: [{ type: "text", text: `Error: ${errorMessage}` }], - isError: true, + content: [{ type: "text" as const, text: Object.entries(info) + .map(([key, value]) => `${key}: ${value}`) + .join("\n") }], }; } -}); +); + +server.registerTool( + "list_allowed_directories", + { + title: "List Allowed Directories", + description: + "Returns the list of directories that this server is allowed to access. " + + "Subdirectories within these allowed directories are also accessible. " + + "Use this to understand which directories and their nested paths are available " + + "before trying to access files.", + inputSchema: {}, + outputSchema: { + content: z.array(z.object({ + type: z.literal("text"), + text: z.string() + })) + } + }, + async () => { + return { + content: [{ + type: "text" as const, + text: `Allowed directories:\n${allowedDirectories.join('\n')}` + }], + }; + } +); // Updates allowed directories based on MCP client roots async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) { @@ -653,10 +740,10 @@ async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) { } // Handles dynamic roots updates during runtime, when client sends "roots/list_changed" notification, server fetches the updated roots and replaces all allowed directories with the new roots. -server.setNotificationHandler(RootsListChangedNotificationSchema, async () => { +server.server.setNotificationHandler(RootsListChangedNotificationSchema, async () => { try { // Request the updated roots list from the client - const response = await server.listRoots(); + const response = await server.server.listRoots(); if (response && 'roots' in response) { await updateAllowedDirectoriesFromRoots(response.roots); } @@ -666,12 +753,12 @@ server.setNotificationHandler(RootsListChangedNotificationSchema, async () => { }); // Handles post-initialization setup, specifically checking for and fetching MCP roots. 
-server.oninitialized = async () => { - const clientCapabilities = server.getClientCapabilities(); +server.server.oninitialized = async () => { + const clientCapabilities = server.server.getClientCapabilities(); if (clientCapabilities?.roots) { try { - const response = await server.listRoots(); + const response = await server.server.listRoots(); if (response && 'roots' in response) { await updateAllowedDirectoriesFromRoots(response.roots); } else { diff --git a/src/filesystem/tsconfig.json b/src/filesystem/tsconfig.json index 31a299d96b..db219c5b45 100644 --- a/src/filesystem/tsconfig.json +++ b/src/filesystem/tsconfig.json @@ -12,6 +12,7 @@ "exclude": [ "**/__tests__/**", "**/*.test.ts", - "**/*.spec.ts" + "**/*.spec.ts", + "vitest.config.ts" ] } From 765f865b04e1b96debdd358de355f3d697cbadf4 Mon Sep 17 00:00:00 2001 From: Adam Jones Date: Mon, 17 Nov 2025 15:18:57 +0000 Subject: [PATCH 2/3] fix: use default import for minimatch minimatch v10+ uses default export instead of named export --- src/filesystem/index.ts | 2 +- src/filesystem/lib.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index e8ddc233f8..b49fe9a2d2 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -10,7 +10,7 @@ import fs from "fs/promises"; import { createReadStream } from "fs"; import path from "path"; import { z } from "zod"; -import { minimatch } from "minimatch"; +import minimatch from "minimatch"; import { normalizePath, expandHome } from './path-utils.js'; import { getValidRootDirectories } from './roots-utils.js'; import { diff --git a/src/filesystem/lib.ts b/src/filesystem/lib.ts index 240ca0d476..0179231e42 100644 --- a/src/filesystem/lib.ts +++ b/src/filesystem/lib.ts @@ -3,7 +3,7 @@ import path from "path"; import os from 'os'; import { randomBytes } from 'crypto'; import { diffLines, createTwoFilesPatch } from 'diff'; -import { minimatch } from 'minimatch'; +import minimatch from 'minimatch'; import { normalizePath, expandHome } from './path-utils.js'; import { isPathWithinAllowedDirectories } from './path-validation.js'; From 6ccad761c6c97ccb848c642d8966b011459f6359 Mon Sep 17 00:00:00 2001 From: Adam Jones Date: Mon, 17 Nov 2025 15:58:51 +0000 Subject: [PATCH 3/3] fix(filesystem): use named import for minimatch The minimatch module doesn't have a default export, so we need to use the named import syntax instead. Fixes TypeScript compilation error: error TS2613: Module has no default export. Did you mean to use 'import { minimatch } from "minimatch"' instead? 
--- src/filesystem/index.ts | 2 +- src/filesystem/lib.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index b49fe9a2d2..e8ddc233f8 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -10,7 +10,7 @@ import fs from "fs/promises"; import { createReadStream } from "fs"; import path from "path"; import { z } from "zod"; -import minimatch from "minimatch"; +import { minimatch } from "minimatch"; import { normalizePath, expandHome } from './path-utils.js'; import { getValidRootDirectories } from './roots-utils.js'; import { diff --git a/src/filesystem/lib.ts b/src/filesystem/lib.ts index 0179231e42..240ca0d476 100644 --- a/src/filesystem/lib.ts +++ b/src/filesystem/lib.ts @@ -3,7 +3,7 @@ import path from "path"; import os from 'os'; import { randomBytes } from 'crypto'; import { diffLines, createTwoFilesPatch } from 'diff'; -import minimatch from 'minimatch'; +import { minimatch } from 'minimatch'; import { normalizePath, expandHome } from './path-utils.js'; import { isPathWithinAllowedDirectories } from './path-validation.js';
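
Note (illustrative, not part of the diffs above): the first commit message describes the registerTool() pattern in the abstract, so a minimal standalone sketch of that shape follows. The "echo" tool, its schema fields, and the stub roots helper are assumptions invented for the example, not code from this series; only the overall shape (Zod schemas passed directly to inputSchema/outputSchema, structuredContent in the result, 'as const' type literals, and roots handling through server.server.*) mirrors what the diffs do.

import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  RootsListChangedNotificationSchema,
  type Root,
} from "@modelcontextprotocol/sdk/types.js";
import { z } from "zod";

const server = new McpServer({
  name: "example-server",
  version: "0.1.0",
});

// Tool registration: Zod shapes go straight into inputSchema/outputSchema
// (no zodToJsonSchema conversion), and the handler receives parsed arguments.
server.registerTool(
  "echo",
  {
    title: "Echo",
    description: "Echo back the provided text.",
    inputSchema: { text: z.string() },
    outputSchema: { reply: z.string() },
  },
  async ({ text }) => {
    const reply = `You said: ${text}`;
    return {
      // 'as const' keeps the content type literal narrow for the SDK's types.
      content: [{ type: "text" as const, text: reply }],
      // structuredContent must match the declared outputSchema shape.
      structuredContent: { reply },
    };
  },
);

// Stand-in for the updateAllowedDirectoriesFromRoots helper defined in index.ts.
async function updateAllowedDirectoriesFromRoots(roots: Root[]): Promise<void> {
  console.error(`Client provided ${roots.length} root(s)`);
}

// Protocol-level concerns (notifications, capability checks, roots requests)
// move to the underlying low-level Server, exposed as server.server.
server.server.setNotificationHandler(RootsListChangedNotificationSchema, async () => {
  const response = await server.server.listRoots();
  if (response && "roots" in response) {
    await updateAllowedDirectoriesFromRoots(response.roots);
  }
});

server.server.oninitialized = async () => {
  if (server.server.getClientCapabilities()?.roots) {
    const response = await server.server.listRoots();
    if (response && "roots" in response) {
      await updateAllowedDirectoriesFromRoots(response.roots);
    }
  }
};

async function main() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
}

main().catch((error) => {
  console.error("Fatal error:", error);
  process.exit(1);
});

The same shape repeats for each filesystem tool in PATCH 1/3; only the schemas, descriptions, and handler bodies differ.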