diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 47736a5..a23d2b1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.16" + ".": "0.1.0-alpha.17" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a85633..672ec0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## 0.1.0-alpha.17 (2025-12-19) + +Full Changelog: [v0.1.0-alpha.16...v0.1.0-alpha.17](https://github.com/Scan-Documents/node-sdk/compare/v0.1.0-alpha.16...v0.1.0-alpha.17) + +### ⚠ BREAKING CHANGES + +* **mcp:** remove deprecated tool schemes +* **mcp:** **Migration:** To migrate, simply modify the command used to invoke the MCP server. Currently, the only supported tool scheme is code mode. Now, starting the server with just `node /path/to/mcp/server` or `npx package-name` will invoke code tools: changing your command to one of these is likely all you will need to do. + +### Chores + +* **mcp:** remove deprecated tool schemes ([49f084c](https://github.com/Scan-Documents/node-sdk/commit/49f084c05786fa386b8da397fce4aa20c92c473f)) + ## 0.1.0-alpha.16 (2025-12-18) Full Changelog: [v0.1.0-alpha.15...v0.1.0-alpha.16](https://github.com/Scan-Documents/node-sdk/compare/v0.1.0-alpha.15...v0.1.0-alpha.16) diff --git a/package.json b/package.json index e7dcf79..d6f8483 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "scan-documents", - "version": "0.1.0-alpha.16", + "version": "0.1.0-alpha.17", "description": "The official TypeScript library for the Scan Documents API", "author": "Scan Documents ", "types": "dist/index.d.ts", diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md index ed234e6..b0cd0b1 100644 --- a/packages/mcp-server/README.md +++ b/packages/mcp-server/README.md @@ -25,7 +25,7 @@ For clients with a configuration JSON, it might look something like this: "mcpServers": { "scan_documents_api": { "command": "npx", - "args": ["-y", "scan-documents-mcp", "--client=claude", "--tools=all"], + "args": ["-y", "scan-documents-mcp"], "env": { "SCAN_DOCUMENTS_API_KEY": "My API Key" } @@ -57,110 +57,22 @@ environment variables in Claude Code's `.claude.json`, which can be found in you claude mcp add --transport stdio scan_documents_api --env SCAN_DOCUMENTS_API_KEY="Your SCAN_DOCUMENTS_API_KEY here." -- npx -y scan-documents-mcp ``` -## Exposing endpoints to your MCP Client +## Code Mode -There are three ways to expose endpoints as tools in the MCP server: +This MCP server is built on the "Code Mode" tool scheme. In this MCP Server, +your agent will write code against the TypeScript SDK, which will then be executed in an +isolated sandbox. To accomplish this, the server will expose two tools to your agent: -1. Exposing one tool per endpoint, and filtering as necessary -2. Exposing a set of tools to dynamically discover and invoke endpoints from the API -3. Exposing a docs search tool and a code execution tool, allowing the client to write code to be executed against the TypeScript client +- The first tool is a docs search tool, which can be used to generically query for + documentation about your API/SDK. -### Filtering endpoints and tools +- The second tool is a code tool, where the agent can write code against the TypeScript SDK. + The code will be executed in a sandbox environment without web or filesystem access. Then, + anything the code returns or prints will be returned to the agent as the result of the + tool call. 
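+As a rough illustration, code submitted to the code tool might look something like the
+sketch below. Treat it as a sketch only: whether the sandbox requires constructing a
+client manually, whether top-level `await`/`return` are supported, and the exact
+`files.list` parameters (e.g. `take`) are assumptions about the SDK and sandbox.
+
+```ts
+import ScanDocuments from 'scan-documents';
+
+// The client typically reads SCAN_DOCUMENTS_API_KEY from the environment.
+const client = new ScanDocuments();
+
+// Chain as many SDK calls as needed; only what the code returns (or prints)
+// is sent back to the agent as the result of the tool call.
+const files = await client.files.list({ take: 5 });
+return files.data.map((file) => ({ id: file.id, name: file.name }));
+```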
-You can run the package on the command line to discover and filter the set of tools that are exposed by the -MCP Server. This can be helpful for large APIs where including all endpoints at once is too much for your AI's -context window. - -You can filter by multiple aspects: - -- `--tool` includes a specific tool by name -- `--resource` includes all tools under a specific resource, and can have wildcards, e.g. `my.resource*` -- `--operation` includes just read (get/list) or just write operations - -### Dynamic tools - -If you specify `--tools=dynamic` to the MCP server, instead of exposing one tool per endpoint in the API, it will -expose the following tools: - -1. `list_api_endpoints` - Discovers available endpoints, with optional filtering by search query -2. `get_api_endpoint_schema` - Gets detailed schema information for a specific endpoint -3. `invoke_api_endpoint` - Executes any endpoint with the appropriate parameters - -This allows you to have the full set of API endpoints available to your MCP Client, while not requiring that all -of their schemas be loaded into context at once. Instead, the LLM will automatically use these tools together to -search for, look up, and invoke endpoints dynamically. However, due to the indirect nature of the schemas, it -can struggle to provide the correct properties a bit more than when tools are imported explicitly. Therefore, -you can opt-in to explicit tools, the dynamic tools, or both. - -See more information with `--help`. - -All of these command-line options can be repeated, combined together, and have corresponding exclusion versions (e.g. `--no-tool`). - -Use `--list` to see the list of available tools, or see below. - -### Code execution - -If you specify `--tools=code` to the MCP server, it will expose just two tools: - -- `search_docs` - Searches the API documentation and returns a list of markdown results -- `execute` - Runs code against the TypeScript client - -This allows the LLM to implement more complex logic by chaining together many API calls without loading -intermediary results into its context window. - -The code execution itself happens in a Deno sandbox that has network access only to the base URL for the API. - -### Specifying the MCP Client - -Different clients have varying abilities to handle arbitrary tools and schemas. - -You can specify the client you are using with the `--client` argument, and the MCP server will automatically -serve tools and schemas that are more compatible with that client. - -- `--client=`: Set all capabilities based on a known MCP client - - - Valid values: `openai-agents`, `claude`, `claude-code`, `cursor` - - Example: `--client=cursor` - -Additionally, if you have a client not on the above list, or the client has gotten better -over time, you can manually enable or disable certain capabilities: - -- `--capability=`: Specify individual client capabilities - - Available capabilities: - - `top-level-unions`: Enable support for top-level unions in tool schemas - - `valid-json`: Enable JSON string parsing for arguments - - `refs`: Enable support for $ref pointers in schemas - - `unions`: Enable support for union types (anyOf) in schemas - - `formats`: Enable support for format validations in schemas (e.g. date-time, email) - - `tool-name-length=N`: Set maximum tool name length to N characters - - Example: `--capability=top-level-unions --capability=tool-name-length=40` - - Example: `--capability=top-level-unions,tool-name-length=40` - -### Examples - -1. 
Filter for read operations on cards: - -```bash ---resource=cards --operation=read -``` - -2. Exclude specific tools while including others: - -```bash ---resource=cards --no-tool=create_cards -``` - -3. Configure for Cursor client with custom max tool name length: - -```bash ---client=cursor --capability=tool-name-length=40 -``` - -4. Complex filtering with multiple criteria: - -```bash ---resource=cards,accounts --operation=read --tag=kyc --no-tool=create_cards -``` +Using this scheme, agents are capable of performing very complex tasks deterministically +and repeatably. ## Running remotely @@ -185,89 +97,3 @@ A configuration JSON for this server might look like this, assuming the server i } } ``` - -The command-line arguments for filtering tools and specifying clients can also be used as query parameters in the URL. -For example, to exclude specific tools while including others, use the URL: - -``` -http://localhost:3000?resource=cards&resource=accounts&no_tool=create_cards -``` - -Or, to configure for the Cursor client, with a custom max tool name length, use the URL: - -``` -http://localhost:3000?client=cursor&capability=tool-name-length%3D40 -``` - -## Importing the tools and server individually - -```js -// Import the server, generated endpoints, or the init function -import { server, endpoints, init } from "scan-documents-mcp/server"; - -// import a specific tool -import retrieveFiles from "scan-documents-mcp/tools/files/retrieve-files"; - -// initialize the server and all endpoints -init({ server, endpoints }); - -// manually start server -const transport = new StdioServerTransport(); -await server.connect(transport); - -// or initialize your own server with specific tools -const myServer = new McpServer(...); - -// define your own endpoint -const myCustomEndpoint = { - tool: { - name: 'my_custom_tool', - description: 'My custom tool', - inputSchema: zodToJsonSchema(z.object({ a_property: z.string() })), - }, - handler: async (client: client, args: any) => { - return { myResponse: 'Hello world!' }; - }) -}; - -// initialize the server with your custom endpoints -init({ server: myServer, endpoints: [retrieveFiles, myCustomEndpoint] }); -``` - -## Available Tools - -The following tools are available in this MCP server. - -### Resource `files`: - -- `retrieve_files` (`read`): Retrieves the data for a specific file by its ID. -- `list_files` (`read`): Retrieves a paginated list of files belonging to the authenticated user. -- `delete_files` (`write`): Deletes a specific file by its ID. -- `download_files` (`read`): Downloads the content of a specific file by its ID. -- `upload_files` (`write`): Uploads a file to the user's storage. The file size is limited to 10MB. - -### Resource `tasks`: - -- `retrieve_tasks` (`read`): Retrieves the data for a specific task by its ID. -- `list_tasks` (`read`): Retrieves a paginated list of tasks belonging to the authenticated user. - -### Resource `events`: - -- `list_events` (`read`): Retrieves a paginated list of events belonging to the authenticated user. - -### Resource `image_operations`: - -- `apply_effect_image_operations` (`write`): Creates a task to apply a specified visual effect to an image. -- `convert_image_operations` (`write`): Creates a task to convert an image file to a different format. -- `detect_documents_image_operations` (`write`): Creates a task to detect document boundaries within an image. -- `extract_text_image_operations` (`write`): Creates a task to extract text from a specified image file. 
-- `scan_image_operations` (`write`): Creates a task to scan an image file. - This is an equivalent operation for `detect-documents` and `warp` combined, additionally it can apply effects to the scanned image. -- `warp_image_operations` (`write`): Creates a task to apply perspective correction (warp) to an image based on detected document boundaries. - -### Resource `pdf_operations`: - -- `extract_pages_pdf_operations` (`write`): Creates a task to extract specific pages from a PDF file into a new PDF file. -- `merge_pdf_operations` (`write`): Creates a task to merge multiple PDF and/or image files into a single PDF file. -- `render_pdf_operations` (`write`): Creates a task to render specified pages of a PDF file as images. -- `split_pdf_operations` (`write`): Creates a task to split a PDF file into multiple single-page PDF files. diff --git a/packages/mcp-server/package.json b/packages/mcp-server/package.json index 80dfd6f..335258f 100644 --- a/packages/mcp-server/package.json +++ b/packages/mcp-server/package.json @@ -1,6 +1,6 @@ { "name": "scan-documents-mcp", - "version": "0.1.0-alpha.16", + "version": "0.1.0-alpha.17", "description": "The official MCP Server for the Scan Documents API", "author": "Scan Documents ", "types": "dist/index.d.ts", diff --git a/packages/mcp-server/src/code-tool.ts b/packages/mcp-server/src/code-tool.ts index ec4934c..6bff8b7 100644 --- a/packages/mcp-server/src/code-tool.ts +++ b/packages/mcp-server/src/code-tool.ts @@ -1,6 +1,6 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -import { Metadata, ToolCallResult, asTextContentResult } from './tools/types'; +import { McpTool, Metadata, ToolCallResult, asTextContentResult } from './types'; import { Tool } from '@modelcontextprotocol/sdk/types.js'; import { readEnv } from './server'; import { WorkerSuccess } from './code-tool-types'; @@ -13,7 +13,7 @@ import { WorkerSuccess } from './code-tool-types'; * * @param endpoints - The endpoints to include in the list. */ -export async function codeTool() { +export function codeTool(): McpTool { const metadata: Metadata = { resource: 'all', operation: 'write', tags: [] }; const tool: Tool = { name: 'execute', diff --git a/packages/mcp-server/src/compat.ts b/packages/mcp-server/src/compat.ts deleted file mode 100644 index f84053c..0000000 --- a/packages/mcp-server/src/compat.ts +++ /dev/null @@ -1,483 +0,0 @@ -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import { z } from 'zod'; -import { Endpoint } from './tools'; - -export interface ClientCapabilities { - topLevelUnions: boolean; - validJson: boolean; - refs: boolean; - unions: boolean; - formats: boolean; - toolNameLength: number | undefined; -} - -export const defaultClientCapabilities: ClientCapabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, -}; - -export const ClientType = z.enum(['openai-agents', 'claude', 'claude-code', 'cursor', 'infer']); -export type ClientType = z.infer; - -// Client presets for compatibility -// Note that these could change over time as models get better, so this is -// a best effort. 
-export const knownClients: Record, ClientCapabilities> = { - 'openai-agents': { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - claude: { - topLevelUnions: true, - validJson: false, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - 'claude-code': { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - cursor: { - topLevelUnions: false, - validJson: true, - refs: false, - unions: false, - formats: false, - toolNameLength: 50, - }, -}; - -/** - * Attempts to parse strings into JSON objects - */ -export function parseEmbeddedJSON(args: Record, schema: Record) { - let updated = false; - const newArgs: Record = Object.assign({}, args); - - for (const [key, value] of Object.entries(newArgs)) { - if (typeof value === 'string') { - try { - const parsed = JSON.parse(value); - // Only parse if result is a plain object (not array, null, or primitive) - if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) { - newArgs[key] = parsed; - updated = true; - } - } catch (e) { - // Not valid JSON, leave as is - } - } - } - - if (updated) { - return newArgs; - } - - return args; -} - -export type JSONSchema = { - type?: string; - properties?: Record; - required?: string[]; - anyOf?: JSONSchema[]; - $ref?: string; - $defs?: Record; - [key: string]: any; -}; - -/** - * Truncates tool names to the specified length while ensuring uniqueness. - * If truncation would cause duplicate names, appends a number to make them unique. - */ -export function truncateToolNames(names: string[], maxLength: number): Map { - if (maxLength <= 0) { - return new Map(); - } - - const renameMap = new Map(); - const usedNames = new Set(); - - const toTruncate = names.filter((name) => name.length > maxLength); - - if (toTruncate.length === 0) { - return renameMap; - } - - const willCollide = - new Set(toTruncate.map((name) => name.slice(0, maxLength - 1))).size < toTruncate.length; - - if (!willCollide) { - for (const name of toTruncate) { - const truncatedName = name.slice(0, maxLength); - renameMap.set(name, truncatedName); - } - } else { - const baseLength = maxLength - 1; - - for (const name of toTruncate) { - const baseName = name.slice(0, baseLength); - let counter = 1; - - while (usedNames.has(baseName + counter)) { - counter++; - } - - const finalName = baseName + counter; - renameMap.set(name, finalName); - usedNames.add(finalName); - } - } - - return renameMap; -} - -/** - * Removes top-level unions from a tool by splitting it into multiple tools, - * one for each variant in the union. 
- */ -export function removeTopLevelUnions(tool: Tool): Tool[] { - const inputSchema = tool.inputSchema as JSONSchema; - const variants = inputSchema.anyOf; - - if (!variants || !Array.isArray(variants) || variants.length === 0) { - return [tool]; - } - - const defs = inputSchema.$defs || {}; - - return variants.map((variant, index) => { - const variantSchema: JSONSchema = { - ...inputSchema, - ...variant, - type: 'object', - properties: { - ...(inputSchema.properties || {}), - ...(variant.properties || {}), - }, - }; - - delete variantSchema.anyOf; - - if (!variantSchema['description']) { - variantSchema['description'] = tool.description; - } - - const usedDefs = findUsedDefs(variant, defs); - if (Object.keys(usedDefs).length > 0) { - variantSchema.$defs = usedDefs; - } else { - delete variantSchema.$defs; - } - - return { - ...tool, - name: `${tool.name}_${toSnakeCase(variant['title'] || `variant${index + 1}`)}`, - description: variant['description'] || tool.description, - inputSchema: variantSchema, - } as Tool; - }); -} - -function findUsedDefs( - schema: JSONSchema, - defs: Record, - visited: Set = new Set(), -): Record { - const usedDefs: Record = {}; - - if (typeof schema !== 'object' || schema === null) { - return usedDefs; - } - - if (schema.$ref) { - const refParts = schema.$ref.split('/'); - if (refParts[0] === '#' && refParts[1] === '$defs' && refParts[2]) { - const defName = refParts[2]; - const def = defs[defName]; - if (def && !visited.has(schema.$ref)) { - usedDefs[defName] = def; - visited.add(schema.$ref); - Object.assign(usedDefs, findUsedDefs(def, defs, visited)); - visited.delete(schema.$ref); - } - } - return usedDefs; - } - - for (const key in schema) { - if (key !== '$defs' && typeof schema[key] === 'object' && schema[key] !== null) { - Object.assign(usedDefs, findUsedDefs(schema[key] as JSONSchema, defs, visited)); - } - } - - return usedDefs; -} - -// Export for testing -export { findUsedDefs }; - -/** - * Inlines all $refs in a schema, eliminating $defs. - * If a circular reference is detected, the circular property is removed. - */ -export function inlineRefs(schema: JSONSchema): JSONSchema { - if (!schema || typeof schema !== 'object') { - return schema; - } - - const clonedSchema = { ...schema }; - const defs: Record = schema.$defs || {}; - - delete clonedSchema.$defs; - - const result = inlineRefsRecursive(clonedSchema, defs, new Set()); - // The top level can never be null - return result === null ? {} : result; -} - -function inlineRefsRecursive( - schema: JSONSchema, - defs: Record, - refPath: Set, -): JSONSchema | null { - if (!schema || typeof schema !== 'object') { - return schema; - } - - if (Array.isArray(schema)) { - return schema.map((item) => { - const processed = inlineRefsRecursive(item, defs, refPath); - return processed === null ? {} : processed; - }) as JSONSchema; - } - - const result = { ...schema }; - - if ('$ref' in result && typeof result.$ref === 'string') { - if (result.$ref.startsWith('#/$defs/')) { - const refName = result.$ref.split('/').pop() as string; - const def = defs[refName]; - - // If we've already seen this ref in our path, we have a circular reference - if (refPath.has(result.$ref)) { - // For circular references, we completely remove the property - // by returning null. The parent will remove it. 
- return null; - } - - if (def) { - const newRefPath = new Set(refPath); - newRefPath.add(result.$ref); - - const inlinedDef = inlineRefsRecursive({ ...def }, defs, newRefPath); - - if (inlinedDef === null) { - return { ...result }; - } - - // Merge the inlined definition with the original schema's properties - // but preserve things like description, etc. - const { $ref, ...rest } = result; - return { ...inlinedDef, ...rest }; - } - } - - // Keep external refs as-is - return result; - } - - for (const key in result) { - if (result[key] && typeof result[key] === 'object') { - const processed = inlineRefsRecursive(result[key] as JSONSchema, defs, refPath); - if (processed === null) { - // Remove properties that would cause circular references - delete result[key]; - } else { - result[key] = processed; - } - } - } - - return result; -} - -/** - * Removes anyOf fields from a schema, using only the first variant. - */ -export function removeAnyOf(schema: JSONSchema): JSONSchema { - if (!schema || typeof schema !== 'object') { - return schema; - } - - if (Array.isArray(schema)) { - return schema.map((item) => removeAnyOf(item)) as JSONSchema; - } - - const result = { ...schema }; - - if ('anyOf' in result && Array.isArray(result.anyOf) && result.anyOf.length > 0) { - const firstVariant = result.anyOf[0]; - - if (firstVariant && typeof firstVariant === 'object') { - // Special handling for properties to ensure deep merge - if (firstVariant.properties && result.properties) { - result.properties = { - ...result.properties, - ...(firstVariant.properties as Record), - }; - } else if (firstVariant.properties) { - result.properties = { ...firstVariant.properties }; - } - - for (const key in firstVariant) { - if (key !== 'properties') { - result[key] = firstVariant[key]; - } - } - } - - delete result.anyOf; - } - - for (const key in result) { - if (result[key] && typeof result[key] === 'object') { - result[key] = removeAnyOf(result[key] as JSONSchema); - } - } - - return result; -} - -/** - * Removes format fields from a schema and appends them to the description. - */ -export function removeFormats(schema: JSONSchema, formatsCapability: boolean): JSONSchema { - if (formatsCapability) { - return schema; - } - - if (!schema || typeof schema !== 'object') { - return schema; - } - - if (Array.isArray(schema)) { - return schema.map((item) => removeFormats(item, formatsCapability)) as JSONSchema; - } - - const result = { ...schema }; - - if ('format' in result && typeof result['format'] === 'string') { - const formatStr = `(format: "${result['format']}")`; - - if ('description' in result && typeof result['description'] === 'string') { - result['description'] = `${result['description']} ${formatStr}`; - } else { - result['description'] = formatStr; - } - - delete result['format']; - } - - for (const key in result) { - if (result[key] && typeof result[key] === 'object') { - result[key] = removeFormats(result[key] as JSONSchema, formatsCapability); - } - } - - return result; -} - -/** - * Applies all compatibility transformations to the endpoints based on the provided capabilities. 
- */ -export function applyCompatibilityTransformations( - endpoints: Endpoint[], - capabilities: ClientCapabilities, -): Endpoint[] { - let transformedEndpoints = [...endpoints]; - - // Handle top-level unions first as this changes tool names - if (!capabilities.topLevelUnions) { - const newEndpoints: Endpoint[] = []; - - for (const endpoint of transformedEndpoints) { - const variantTools = removeTopLevelUnions(endpoint.tool); - - if (variantTools.length === 1) { - newEndpoints.push(endpoint); - } else { - for (const variantTool of variantTools) { - newEndpoints.push({ - ...endpoint, - tool: variantTool, - }); - } - } - } - - transformedEndpoints = newEndpoints; - } - - if (capabilities.toolNameLength) { - const toolNames = transformedEndpoints.map((endpoint) => endpoint.tool.name); - const renameMap = truncateToolNames(toolNames, capabilities.toolNameLength); - - transformedEndpoints = transformedEndpoints.map((endpoint) => ({ - ...endpoint, - tool: { - ...endpoint.tool, - name: renameMap.get(endpoint.tool.name) ?? endpoint.tool.name, - }, - })); - } - - if (!capabilities.refs || !capabilities.unions || !capabilities.formats) { - transformedEndpoints = transformedEndpoints.map((endpoint) => { - let schema = endpoint.tool.inputSchema as JSONSchema; - - if (!capabilities.refs) { - schema = inlineRefs(schema); - } - - if (!capabilities.unions) { - schema = removeAnyOf(schema); - } - - if (!capabilities.formats) { - schema = removeFormats(schema, capabilities.formats); - } - - return { - ...endpoint, - tool: { - ...endpoint.tool, - inputSchema: schema as typeof endpoint.tool.inputSchema, - }, - }; - }); - } - - return transformedEndpoints; -} - -function toSnakeCase(str: string): string { - return str - .replace(/\s+/g, '_') - .replace(/([a-z])([A-Z])/g, '$1_$2') - .toLowerCase(); -} diff --git a/packages/mcp-server/src/docs-search-tool.ts b/packages/mcp-server/src/docs-search-tool.ts index ed4b8ea..0f4f46b 100644 --- a/packages/mcp-server/src/docs-search-tool.ts +++ b/packages/mcp-server/src/docs-search-tool.ts @@ -1,6 +1,6 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -import { Metadata, asTextContentResult } from './tools/types'; +import { Metadata, asTextContentResult } from './types'; import { Tool } from '@modelcontextprotocol/sdk/types.js'; diff --git a/packages/mcp-server/src/dynamic-tools.ts b/packages/mcp-server/src/dynamic-tools.ts deleted file mode 100644 index 9efd496..0000000 --- a/packages/mcp-server/src/dynamic-tools.ts +++ /dev/null @@ -1,159 +0,0 @@ -import ScanDocuments from 'scan-documents'; -import { Endpoint, asTextContentResult, ToolCallResult } from './tools/types'; -import { zodToJsonSchema } from 'zod-to-json-schema'; -import { z } from 'zod'; -import { Cabidela } from '@cloudflare/cabidela'; - -function zodToInputSchema(schema: z.ZodSchema) { - return { - type: 'object' as const, - ...(zodToJsonSchema(schema) as any), - }; -} - -/** - * A list of tools that expose all the endpoints in the API dynamically. - * - * Instead of exposing every endpoint as its own tool, which uses up too many tokens for LLMs to use at once, - * we expose a single tool that can be used to search for endpoints by name, resource, operation, or tag, and then - * a generic endpoint that can be used to invoke any endpoint with the provided arguments. - * - * @param endpoints - The endpoints to include in the list. 
- */ -export function dynamicTools(endpoints: Endpoint[]): Endpoint[] { - const listEndpointsSchema = z.object({ - search_query: z - .string() - .optional() - .describe( - 'An optional search query to filter the endpoints by. Provide a partial name, resource, operation, or tag to filter the endpoints returned.', - ), - }); - - const listEndpointsTool = { - metadata: { - resource: 'dynamic_tools', - operation: 'read' as const, - tags: [], - }, - tool: { - name: 'list_api_endpoints', - description: 'List or search for all endpoints in the Scan Documents TypeScript API', - inputSchema: zodToInputSchema(listEndpointsSchema), - }, - handler: async ( - client: ScanDocuments, - args: Record | undefined, - ): Promise => { - const query = args && listEndpointsSchema.parse(args).search_query?.trim(); - - const filteredEndpoints = - query && query.length > 0 ? - endpoints.filter((endpoint) => { - const fieldsToMatch = [ - endpoint.tool.name, - endpoint.tool.description, - endpoint.metadata.resource, - endpoint.metadata.operation, - ...endpoint.metadata.tags, - ]; - return fieldsToMatch.some((field) => field && field.toLowerCase().includes(query.toLowerCase())); - }) - : endpoints; - - return asTextContentResult({ - tools: filteredEndpoints.map(({ tool, metadata }) => ({ - name: tool.name, - description: tool.description, - resource: metadata.resource, - operation: metadata.operation, - tags: metadata.tags, - })), - }); - }, - }; - - const getEndpointSchema = z.object({ - endpoint: z.string().describe('The name of the endpoint to get the schema for.'), - }); - const getEndpointTool = { - metadata: { - resource: 'dynamic_tools', - operation: 'read' as const, - tags: [], - }, - tool: { - name: 'get_api_endpoint_schema', - description: - 'Get the schema for an endpoint in the Scan Documents TypeScript API. You can use the schema returned by this tool to invoke an endpoint with the `invoke_api_endpoint` tool.', - inputSchema: zodToInputSchema(getEndpointSchema), - }, - handler: async (client: ScanDocuments, args: Record | undefined) => { - if (!args) { - throw new Error('No endpoint provided'); - } - const endpointName = getEndpointSchema.parse(args).endpoint; - - const endpoint = endpoints.find((e) => e.tool.name === endpointName); - if (!endpoint) { - throw new Error(`Endpoint ${endpointName} not found`); - } - return asTextContentResult(endpoint.tool); - }, - }; - - const invokeEndpointSchema = z.object({ - endpoint_name: z.string().describe('The name of the endpoint to invoke.'), - args: z - .record(z.string(), z.any()) - .describe( - 'The arguments to pass to the endpoint. This must match the schema returned by the `get_api_endpoint_schema` tool.', - ), - }); - - const invokeEndpointTool = { - metadata: { - resource: 'dynamic_tools', - operation: 'write' as const, - tags: [], - }, - tool: { - name: 'invoke_api_endpoint', - description: - 'Invoke an endpoint in the Scan Documents TypeScript API. Note: use the `list_api_endpoints` tool to get the list of endpoints and `get_api_endpoint_schema` tool to get the schema for an endpoint.', - inputSchema: zodToInputSchema(invokeEndpointSchema), - }, - handler: async ( - client: ScanDocuments, - args: Record | undefined, - ): Promise => { - if (!args) { - throw new Error('No endpoint provided'); - } - const { success, data, error } = invokeEndpointSchema.safeParse(args); - if (!success) { - throw new Error(`Invalid arguments for endpoint. 
${error?.format()}`); - } - const { endpoint_name, args: endpointArgs } = data; - - const endpoint = endpoints.find((e) => e.tool.name === endpoint_name); - if (!endpoint) { - throw new Error( - `Endpoint ${endpoint_name} not found. Use the \`list_api_endpoints\` tool to get the list of available endpoints.`, - ); - } - - try { - // Try to validate the arguments for a better error message - const cabidela = new Cabidela(endpoint.tool.inputSchema, { fullErrors: true }); - cabidela.validate(endpointArgs); - } catch (error) { - throw new Error(`Invalid arguments for endpoint ${endpoint_name}:\n${error}`); - } - - return await endpoint.handler(client, endpointArgs); - }, - }; - - return [getEndpointTool, listEndpointsTool, invokeEndpointTool]; -} diff --git a/packages/mcp-server/src/filtering.ts b/packages/mcp-server/src/filtering.ts deleted file mode 100644 index eaae0fc..0000000 --- a/packages/mcp-server/src/filtering.ts +++ /dev/null @@ -1,18 +0,0 @@ -// @ts-nocheck -import initJq from 'jq-web'; - -export async function maybeFilter(jqFilter: unknown | undefined, response: any): Promise { - if (jqFilter && typeof jqFilter === 'string') { - return await jq(response, jqFilter); - } else { - return response; - } -} - -async function jq(json: any, jqFilter: string) { - return (await initJq).json(json, jqFilter); -} - -export function isJqError(error: any): error is Error { - return error instanceof Error && 'stderr' in error; -} diff --git a/packages/mcp-server/src/http.ts b/packages/mcp-server/src/http.ts index 8451700..2366d8f 100644 --- a/packages/mcp-server/src/http.ts +++ b/packages/mcp-server/src/http.ts @@ -4,38 +4,21 @@ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp'; import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; import express from 'express'; -import { fromError } from 'zod-validation-error/v3'; -import { McpOptions, parseQueryOptions } from './options'; +import { McpOptions } from './options'; import { ClientOptions, initMcpServer, newMcpServer } from './server'; import { parseAuthHeaders } from './headers'; const newServer = ({ clientOptions, - mcpOptions: defaultMcpOptions, req, res, }: { clientOptions: ClientOptions; - mcpOptions: McpOptions; req: express.Request; res: express.Response; }): McpServer | null => { const server = newMcpServer(); - let mcpOptions: McpOptions; - try { - mcpOptions = parseQueryOptions(defaultMcpOptions, req.query); - } catch (error) { - res.status(400).json({ - jsonrpc: '2.0', - error: { - code: -32000, - message: `Invalid request: ${fromError(error)}`, - }, - }); - return null; - } - try { const authOptions = parseAuthHeaders(req); initMcpServer({ @@ -44,7 +27,6 @@ const newServer = ({ ...clientOptions, ...authOptions, }, - mcpOptions, }); } catch (error) { res.status(401).json({ diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts index 4850a0e..0f6dd42 100644 --- a/packages/mcp-server/src/index.ts +++ b/packages/mcp-server/src/index.ts @@ -1,20 +1,15 @@ #!/usr/bin/env node import { selectTools } from './server'; -import { Endpoint, endpoints } from './tools'; import { McpOptions, parseCLIOptions } from './options'; import { launchStdioServer } from './stdio'; import { launchStreamableHTTPServer } from './http'; +import type { McpTool } from './types'; async function main() { const options = parseOptionsOrError(); - if (options.list) { - listAllTools(); - return; - } - - const selectedTools = await selectToolsOrError(endpoints, options); + const 
selectedTools = await selectToolsOrError(options); console.error( `MCP Server starting with ${selectedTools.length} tools:`, @@ -23,7 +18,7 @@ async function main() { switch (options.transport) { case 'stdio': - await launchStdioServer(options); + await launchStdioServer(); break; case 'http': await launchStreamableHTTPServer(options, options.port ?? options.socket); @@ -47,9 +42,9 @@ function parseOptionsOrError() { } } -async function selectToolsOrError(endpoints: Endpoint[], options: McpOptions): Promise { +async function selectToolsOrError(options: McpOptions): Promise { try { - const includedTools = await selectTools(endpoints, options); + const includedTools = selectTools(options); if (includedTools.length === 0) { console.error('No tools match the provided filters.'); process.exit(1); @@ -64,45 +59,3 @@ async function selectToolsOrError(endpoints: Endpoint[], options: McpOptions): P process.exit(1); } } - -function listAllTools() { - if (endpoints.length === 0) { - console.log('No tools available.'); - return; - } - console.log('Available tools:\n'); - - // Group endpoints by resource - const resourceGroups = new Map(); - - for (const endpoint of endpoints) { - const resource = endpoint.metadata.resource; - if (!resourceGroups.has(resource)) { - resourceGroups.set(resource, []); - } - resourceGroups.get(resource)!.push(endpoint); - } - - // Sort resources alphabetically - const sortedResources = Array.from(resourceGroups.keys()).sort(); - - // Display hierarchically by resource - for (const resource of sortedResources) { - console.log(`Resource: ${resource}`); - - const resourceEndpoints = resourceGroups.get(resource)!; - // Sort endpoints by tool name - resourceEndpoints.sort((a, b) => a.tool.name.localeCompare(b.tool.name)); - - for (const endpoint of resourceEndpoints) { - const { - tool, - metadata: { operation, tags }, - } = endpoint; - - console.log(` - ${tool.name} (${operation}) ${tags.length > 0 ? `tags: ${tags.join(', ')}` : ''}`); - console.log(` Description: ${tool.description}`); - } - console.log(''); - } -} diff --git a/packages/mcp-server/src/options.ts b/packages/mcp-server/src/options.ts index b6ff597..6c8bb8d 100644 --- a/packages/mcp-server/src/options.ts +++ b/packages/mcp-server/src/options.ts @@ -2,140 +2,31 @@ import qs from 'qs'; import yargs from 'yargs'; import { hideBin } from 'yargs/helpers'; import z from 'zod'; -import { endpoints, Filter } from './tools'; -import { ClientCapabilities, knownClients, ClientType } from './compat'; export type CLIOptions = McpOptions & { - list: boolean; transport: 'stdio' | 'http'; port: number | undefined; socket: string | undefined; }; export type McpOptions = { - client?: ClientType | undefined; - includeDynamicTools?: boolean | undefined; - includeAllTools?: boolean | undefined; - includeCodeTools?: boolean | undefined; includeDocsTools?: boolean | undefined; - filters?: Filter[] | undefined; - capabilities?: Partial | undefined; }; -const CAPABILITY_CHOICES = [ - 'top-level-unions', - 'valid-json', - 'refs', - 'unions', - 'formats', - 'tool-name-length', -] as const; - -type Capability = (typeof CAPABILITY_CHOICES)[number]; - -function parseCapabilityValue(cap: string): { name: Capability; value?: number } { - if (cap.startsWith('tool-name-length=')) { - const parts = cap.split('='); - if (parts.length === 2) { - const length = parseInt(parts[1]!, 10); - if (!isNaN(length)) { - return { name: 'tool-name-length', value: length }; - } - throw new Error(`Invalid tool-name-length value: ${parts[1]}. 
Expected a number.`); - } - throw new Error(`Invalid format for tool-name-length. Expected tool-name-length=N.`); - } - if (!CAPABILITY_CHOICES.includes(cap as Capability)) { - throw new Error(`Unknown capability: ${cap}. Valid capabilities are: ${CAPABILITY_CHOICES.join(', ')}`); - } - return { name: cap as Capability }; -} - export function parseCLIOptions(): CLIOptions { const opts = yargs(hideBin(process.argv)) .option('tools', { type: 'string', array: true, - choices: ['dynamic', 'all', 'code', 'docs'], + choices: ['code', 'docs'], description: 'Use dynamic tools or all tools', }) .option('no-tools', { type: 'string', array: true, - choices: ['dynamic', 'all', 'code', 'docs'], + choices: ['code', 'docs'], description: 'Do not use any dynamic or all tools', }) - .option('tool', { - type: 'string', - array: true, - description: 'Include tools matching the specified names', - }) - .option('resource', { - type: 'string', - array: true, - description: 'Include tools matching the specified resources', - }) - .option('operation', { - type: 'string', - array: true, - choices: ['read', 'write'], - description: 'Include tools matching the specified operations', - }) - .option('tag', { - type: 'string', - array: true, - description: 'Include tools with the specified tags', - }) - .option('no-tool', { - type: 'string', - array: true, - description: 'Exclude tools matching the specified names', - }) - .option('no-resource', { - type: 'string', - array: true, - description: 'Exclude tools matching the specified resources', - }) - .option('no-operation', { - type: 'string', - array: true, - description: 'Exclude tools matching the specified operations', - }) - .option('no-tag', { - type: 'string', - array: true, - description: 'Exclude tools with the specified tags', - }) - .option('list', { - type: 'boolean', - description: 'List all tools and exit', - }) - .option('client', { - type: 'string', - choices: Object.keys(knownClients), - description: 'Specify the MCP client being used', - }) - .option('capability', { - type: 'string', - array: true, - description: 'Specify client capabilities', - coerce: (values: string[]) => { - return values.flatMap((v) => v.split(',')); - }, - }) - .option('no-capability', { - type: 'string', - array: true, - description: 'Unset client capabilities', - choices: CAPABILITY_CHOICES, - coerce: (values: string[]) => { - return values.flatMap((v) => v.split(',')); - }, - }) - .option('describe-capabilities', { - type: 'boolean', - description: 'Print detailed explanation of client capabilities and exit', - }) .option('transport', { type: 'string', choices: ['stdio', 'http'], @@ -152,122 +43,19 @@ export function parseCLIOptions(): CLIOptions { }) .help(); - for (const [command, desc] of examples()) { - opts.example(command, desc); - } - const argv = opts.parseSync(); - // Handle describe-capabilities flag - if (argv.describeCapabilities) { - console.log(getCapabilitiesExplanation()); - process.exit(0); - } - - const filters: Filter[] = []; - - // Helper function to support comma-separated values - const splitValues = (values: string[] | undefined): string[] => { - if (!values) return []; - return values.flatMap((v) => v.split(',')); - }; - - for (const tag of splitValues(argv.tag)) { - filters.push({ type: 'tag', op: 'include', value: tag }); - } - - for (const tag of splitValues(argv.noTag)) { - filters.push({ type: 'tag', op: 'exclude', value: tag }); - } - - for (const resource of splitValues(argv.resource)) { - filters.push({ type: 'resource', op: 'include', value: 
resource }); - } - - for (const resource of splitValues(argv.noResource)) { - filters.push({ type: 'resource', op: 'exclude', value: resource }); - } - - for (const tool of splitValues(argv.tool)) { - filters.push({ type: 'tool', op: 'include', value: tool }); - } - - for (const tool of splitValues(argv.noTool)) { - filters.push({ type: 'tool', op: 'exclude', value: tool }); - } - - for (const operation of splitValues(argv.operation)) { - filters.push({ type: 'operation', op: 'include', value: operation }); - } - - for (const operation of splitValues(argv.noOperation)) { - filters.push({ type: 'operation', op: 'exclude', value: operation }); - } - - // Parse client capabilities - const clientCapabilities: Partial = {}; - - // Apply individual capability overrides - if (Array.isArray(argv.capability)) { - for (const cap of argv.capability) { - const parsedCap = parseCapabilityValue(cap); - if (parsedCap.name === 'top-level-unions') { - clientCapabilities.topLevelUnions = true; - } else if (parsedCap.name === 'valid-json') { - clientCapabilities.validJson = true; - } else if (parsedCap.name === 'refs') { - clientCapabilities.refs = true; - } else if (parsedCap.name === 'unions') { - clientCapabilities.unions = true; - } else if (parsedCap.name === 'formats') { - clientCapabilities.formats = true; - } else if (parsedCap.name === 'tool-name-length') { - clientCapabilities.toolNameLength = parsedCap.value; - } - } - } - - // Handle no-capability options to unset capabilities - if (Array.isArray(argv.noCapability)) { - for (const cap of argv.noCapability) { - if (cap === 'top-level-unions') { - clientCapabilities.topLevelUnions = false; - } else if (cap === 'valid-json') { - clientCapabilities.validJson = false; - } else if (cap === 'refs') { - clientCapabilities.refs = false; - } else if (cap === 'unions') { - clientCapabilities.unions = false; - } else if (cap === 'formats') { - clientCapabilities.formats = false; - } else if (cap === 'tool-name-length') { - clientCapabilities.toolNameLength = undefined; - } - } - } - - const shouldIncludeToolType = (toolType: 'dynamic' | 'all' | 'code' | 'docs') => + const shouldIncludeToolType = (toolType: 'code' | 'docs') => argv.noTools?.includes(toolType) ? false : argv.tools?.includes(toolType) ? true : undefined; - const includeDynamicTools = shouldIncludeToolType('dynamic'); - const includeAllTools = shouldIncludeToolType('all'); - const includeCodeTools = shouldIncludeToolType('code'); const includeDocsTools = shouldIncludeToolType('docs'); const transport = argv.transport as 'stdio' | 'http'; - const client = argv.client as ClientType; return { - client: client && client !== 'infer' && knownClients[client] ? 
client : undefined, - includeDynamicTools, - includeAllTools, - includeCodeTools, includeDocsTools, - filters, - capabilities: clientCapabilities, - list: argv.list || false, transport, port: argv.port, socket: argv.socket, @@ -284,190 +72,21 @@ const coerceArray = (zodType: T) => ); const QueryOptions = z.object({ - tools: coerceArray(z.enum(['dynamic', 'all', 'code', 'docs'])).describe('Specify which MCP tools to use'), - no_tools: coerceArray(z.enum(['dynamic', 'all', 'code', 'docs'])).describe( - 'Specify which MCP tools to not use.', - ), + tools: coerceArray(z.enum(['code', 'docs'])).describe('Specify which MCP tools to use'), + no_tools: coerceArray(z.enum(['code', 'docs'])).describe('Specify which MCP tools to not use.'), tool: coerceArray(z.string()).describe('Include tools matching the specified names'), - resource: coerceArray(z.string()).describe('Include tools matching the specified resources'), - operation: coerceArray(z.enum(['read', 'write'])).describe( - 'Include tools matching the specified operations', - ), - tag: coerceArray(z.string()).describe('Include tools with the specified tags'), - no_tool: coerceArray(z.string()).describe('Exclude tools matching the specified names'), - no_resource: coerceArray(z.string()).describe('Exclude tools matching the specified resources'), - no_operation: coerceArray(z.enum(['read', 'write'])).describe( - 'Exclude tools matching the specified operations', - ), - no_tag: coerceArray(z.string()).describe('Exclude tools with the specified tags'), - client: ClientType.optional().describe('Specify the MCP client being used'), - capability: coerceArray(z.string()).describe('Specify client capabilities'), - no_capability: coerceArray(z.enum(CAPABILITY_CHOICES)).describe('Unset client capabilities'), }); export function parseQueryOptions(defaultOptions: McpOptions, query: unknown): McpOptions { const queryObject = typeof query === 'string' ? qs.parse(query) : query; const queryOptions = QueryOptions.parse(queryObject); - const filters: Filter[] = [...(defaultOptions.filters ?? 
[])]; - - for (const resource of queryOptions.resource || []) { - filters.push({ type: 'resource', op: 'include', value: resource }); - } - for (const operation of queryOptions.operation || []) { - filters.push({ type: 'operation', op: 'include', value: operation }); - } - for (const tag of queryOptions.tag || []) { - filters.push({ type: 'tag', op: 'include', value: tag }); - } - for (const tool of queryOptions.tool || []) { - filters.push({ type: 'tool', op: 'include', value: tool }); - } - for (const resource of queryOptions.no_resource || []) { - filters.push({ type: 'resource', op: 'exclude', value: resource }); - } - for (const operation of queryOptions.no_operation || []) { - filters.push({ type: 'operation', op: 'exclude', value: operation }); - } - for (const tag of queryOptions.no_tag || []) { - filters.push({ type: 'tag', op: 'exclude', value: tag }); - } - for (const tool of queryOptions.no_tool || []) { - filters.push({ type: 'tool', op: 'exclude', value: tool }); - } - - // Parse client capabilities - const clientCapabilities: Partial = { ...defaultOptions.capabilities }; - - for (const cap of queryOptions.capability || []) { - const parsed = parseCapabilityValue(cap); - if (parsed.name === 'top-level-unions') { - clientCapabilities.topLevelUnions = true; - } else if (parsed.name === 'valid-json') { - clientCapabilities.validJson = true; - } else if (parsed.name === 'refs') { - clientCapabilities.refs = true; - } else if (parsed.name === 'unions') { - clientCapabilities.unions = true; - } else if (parsed.name === 'formats') { - clientCapabilities.formats = true; - } else if (parsed.name === 'tool-name-length') { - clientCapabilities.toolNameLength = parsed.value; - } - } - - for (const cap of queryOptions.no_capability || []) { - if (cap === 'top-level-unions') { - clientCapabilities.topLevelUnions = false; - } else if (cap === 'valid-json') { - clientCapabilities.validJson = false; - } else if (cap === 'refs') { - clientCapabilities.refs = false; - } else if (cap === 'unions') { - clientCapabilities.unions = false; - } else if (cap === 'formats') { - clientCapabilities.formats = false; - } else if (cap === 'tool-name-length') { - clientCapabilities.toolNameLength = undefined; - } - } - - let dynamicTools: boolean | undefined = - queryOptions.no_tools && queryOptions.no_tools?.includes('dynamic') ? false - : queryOptions.tools?.includes('dynamic') ? true - : defaultOptions.includeDynamicTools; - - let allTools: boolean | undefined = - queryOptions.no_tools && queryOptions.no_tools?.includes('all') ? false - : queryOptions.tools?.includes('all') ? true - : defaultOptions.includeAllTools; - let docsTools: boolean | undefined = queryOptions.no_tools && queryOptions.no_tools?.includes('docs') ? false : queryOptions.tools?.includes('docs') ? true : defaultOptions.includeDocsTools; - let codeTools: boolean | undefined = - queryOptions.no_tools && queryOptions.no_tools?.includes('code') ? false - : queryOptions.tools?.includes('code') && defaultOptions.includeCodeTools ? true - : defaultOptions.includeCodeTools; - return { - client: queryOptions.client ?? defaultOptions.client, - includeDynamicTools: dynamicTools, - includeAllTools: allTools, - includeCodeTools: codeTools, includeDocsTools: docsTools, - filters, - capabilities: clientCapabilities, }; } - -function getCapabilitiesExplanation(): string { - return ` -Client Capabilities Explanation: - -Different Language Models (LLMs) and the MCP clients that use them have varying limitations in how they handle tool schemas. 
Capability flags allow you to inform the MCP server about these limitations. - -When a capability flag is set to false, the MCP server will automatically adjust the tool schemas to work around that limitation, ensuring broader compatibility. - -Available Capabilities: - -# top-level-unions -Some clients/LLMs do not support JSON schemas with a union type (anyOf) at the root level. If a client lacks this capability, the MCP server splits tools with top-level unions into multiple separate tools, one for each variant in the union. - -# refs -Some clients/LLMs do not support $ref pointers for schema reuse. If a client lacks this capability, the MCP server automatically inlines all references ($defs) directly into the schema. Properties that would cause circular references are removed during this process. - -# valid-json -Some clients/LLMs may incorrectly send arguments as a JSON-encoded string instead of a proper JSON object. If a client *has* this capability, the MCP server will attempt to parse string values as JSON if the initial validation against the schema fails. - -# unions -Some clients/LLMs do not support union types (anyOf) in JSON schemas. If a client lacks this capability, the MCP server removes all anyOf fields and uses only the first variant as the schema. - -# formats -Some clients/LLMs do not support the 'format' keyword in JSON Schema specifications. If a client lacks this capability, the MCP server removes all format fields and appends the format information to the field's description in parentheses. - -# tool-name-length=N -Some clients/LLMs impose a maximum length on tool names. If this capability is set, the MCP server will automatically truncate tool names exceeding the specified length (N), ensuring uniqueness by appending numbers if necessary. - -Client Presets (--client): -Presets like '--client=openai-agents' or '--client=cursor' automatically configure these capabilities based on current known limitations of those clients, simplifying setup. - -Current presets: -${JSON.stringify(knownClients, null, 2)} - `; -} - -function examples(): [string, string][] { - const firstEndpoint = endpoints[0]!; - const secondEndpoint = - endpoints.find((e) => e.metadata.resource !== firstEndpoint.metadata.resource) || endpoints[1]; - const tag = endpoints.find((e) => e.metadata.tags.length > 0)?.metadata.tags[0]; - const otherEndpoint = secondEndpoint || firstEndpoint; - - return [ - [ - `--tool="${firstEndpoint.tool.name}" ${secondEndpoint ? `--tool="${secondEndpoint.tool.name}"` : ''}`, - 'Include tools by name', - ], - [ - `--resource="${firstEndpoint.metadata.resource}" --operation="read"`, - 'Filter by resource and operation', - ], - [ - `--resource="${otherEndpoint.metadata.resource}*" --no-tool="${otherEndpoint.tool.name}"`, - 'Use resource wildcards and exclusions', - ], - [`--client="cursor"`, 'Adjust schemas to be more compatible with Cursor'], - [ - `--capability="top-level-unions" --capability="tool-name-length=40"`, - 'Specify individual client capabilities', - ], - [ - `--client="cursor" --no-capability="tool-name-length"`, - 'Use cursor client preset but remove tool name length limit', - ], - ...(tag ? 
[[`--tag="${tag}"`, 'Filter based on tags'] as [string, string]] : []), - ]; -} diff --git a/packages/mcp-server/src/server.ts b/packages/mcp-server/src/server.ts index 082974f..b307188 100644 --- a/packages/mcp-server/src/server.ts +++ b/packages/mcp-server/src/server.ts @@ -2,39 +2,26 @@ import { Server } from '@modelcontextprotocol/sdk/server/index.js'; import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { Endpoint, endpoints, HandlerFunction, query } from './tools'; import { CallToolRequestSchema, ListToolsRequestSchema, SetLevelRequestSchema, - Implementation, - Tool, } from '@modelcontextprotocol/sdk/types.js'; import { ClientOptions } from 'scan-documents'; import ScanDocuments from 'scan-documents'; -import { - applyCompatibilityTransformations, - ClientCapabilities, - defaultClientCapabilities, - knownClients, - parseEmbeddedJSON, -} from './compat'; -import { dynamicTools } from './dynamic-tools'; import { codeTool } from './code-tool'; import docsSearchTool from './docs-search-tool'; import { McpOptions } from './options'; +import { HandlerFunction, McpTool } from './types'; export { McpOptions } from './options'; -export { ClientType } from './compat'; -export { Filter } from './tools'; export { ClientOptions } from 'scan-documents'; -export { endpoints } from './tools'; export const newMcpServer = () => new McpServer( { name: 'scan_documents_api', - version: '0.1.0-alpha.16', + version: '0.1.0-alpha.17', }, { capabilities: { tools: {}, logging: {} } }, ); @@ -52,25 +39,6 @@ export function initMcpServer(params: { mcpOptions?: McpOptions; }) { const server = params.server instanceof McpServer ? params.server.server : params.server; - const mcpOptions = params.mcpOptions ?? {}; - - let providedEndpoints: Endpoint[] | null = null; - let endpointMap: Record | null = null; - - const initTools = async (implementation?: Implementation) => { - if (implementation && (!mcpOptions.client || mcpOptions.client === 'infer')) { - mcpOptions.client = - implementation.name.toLowerCase().includes('claude') ? 'claude' - : implementation.name.toLowerCase().includes('cursor') ? 
'cursor' - : undefined; - mcpOptions.capabilities = { - ...(mcpOptions.client && knownClients[mcpOptions.client]), - ...mcpOptions.capabilities, - }; - } - providedEndpoints ??= await selectTools(endpoints, mcpOptions); - endpointMap ??= Object.fromEntries(providedEndpoints.map((endpoint) => [endpoint.tool.name, endpoint])); - }; const logAtLevel = (level: 'debug' | 'info' | 'warning' | 'error') => @@ -96,26 +64,23 @@ export function initMcpServer(params: { }, }); + const providedTools = selectTools(params.mcpOptions); + const toolMap = Object.fromEntries(providedTools.map((mcpTool) => [mcpTool.tool.name, mcpTool])); + server.setRequestHandler(ListToolsRequestSchema, async () => { - if (providedEndpoints === null) { - await initTools(server.getClientVersion()); - } return { - tools: providedEndpoints!.map((endpoint) => endpoint.tool), + tools: providedTools.map((mcpTool) => mcpTool.tool), }; }); server.setRequestHandler(CallToolRequestSchema, async (request) => { - if (endpointMap === null) { - await initTools(server.getClientVersion()); - } const { name, arguments: args } = request.params; - const endpoint = endpointMap![name]; - if (!endpoint) { + const mcpTool = toolMap[name]; + if (!mcpTool) { throw new Error(`Unknown tool: ${name}`); } - return executeHandler(endpoint.tool, endpoint.handler, client, args, mcpOptions.capabilities); + return executeHandler(mcpTool.handler, client, args); }); server.setRequestHandler(SetLevelRequestSchema, async (request) => { @@ -145,47 +110,22 @@ export function initMcpServer(params: { /** * Selects the tools to include in the MCP Server based on the provided options. */ -export async function selectTools(endpoints: Endpoint[], options?: McpOptions): Promise { - const filteredEndpoints = query(options?.filters ?? [], endpoints); - - let includedTools = filteredEndpoints.slice(); - - if (includedTools.length > 0) { - if (options?.includeDynamicTools) { - includedTools = dynamicTools(includedTools); - } - } else { - if (options?.includeAllTools) { - includedTools = endpoints.slice(); - } else if (options?.includeDynamicTools) { - includedTools = dynamicTools(endpoints); - } else if (options?.includeCodeTools) { - includedTools = [await codeTool()]; - } else { - includedTools = endpoints.slice(); - } - } +export function selectTools(options?: McpOptions): McpTool[] { + const includedTools = [codeTool()]; if (options?.includeDocsTools ?? true) { includedTools.push(docsSearchTool); } - const capabilities = { ...defaultClientCapabilities, ...options?.capabilities }; - return applyCompatibilityTransformations(includedTools, capabilities); + return includedTools; } /** * Runs the provided handler with the given client and arguments. 
*/ export async function executeHandler( - tool: Tool, handler: HandlerFunction, client: ScanDocuments, args: Record | undefined, - compatibilityOptions?: Partial, ) { - const options = { ...defaultClientCapabilities, ...compatibilityOptions }; - if (!options.validJson && args) { - args = parseEmbeddedJSON(args, tool.inputSchema); - } return await handler(client, args || {}); } diff --git a/packages/mcp-server/src/stdio.ts b/packages/mcp-server/src/stdio.ts index d902a5b..f07696f 100644 --- a/packages/mcp-server/src/stdio.ts +++ b/packages/mcp-server/src/stdio.ts @@ -1,11 +1,10 @@ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; import { initMcpServer, newMcpServer } from './server'; -import { McpOptions } from './options'; -export const launchStdioServer = async (options: McpOptions) => { +export const launchStdioServer = async () => { const server = newMcpServer(); - initMcpServer({ server, mcpOptions: options }); + initMcpServer({ server }); const transport = new StdioServerTransport(); await server.connect(transport); diff --git a/packages/mcp-server/src/tools.ts b/packages/mcp-server/src/tools.ts deleted file mode 100644 index 7e516de..0000000 --- a/packages/mcp-server/src/tools.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './tools/index'; diff --git a/packages/mcp-server/src/tools/events/list-events.ts b/packages/mcp-server/src/tools/events/list-events.ts deleted file mode 100644 index 826bfad..0000000 --- a/packages/mcp-server/src/tools/events/list-events.ts +++ /dev/null @@ -1,49 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'events', - operation: 'read', - tags: [], - httpMethod: 'get', - httpPath: '/v1/events', - operationId: 'listEvents', -}; - -export const tool: Tool = { - name: 'list_events', - description: 'Retrieves a paginated list of events belonging to the authenticated user.', - inputSchema: { - type: 'object', - properties: { - from: { - type: 'string', - }, - take: { - type: 'number', - }, - }, - required: [], - }, - annotations: { - readOnlyHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.events.list(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/files/delete-files.ts b/packages/mcp-server/src/tools/files/delete-files.ts deleted file mode 100644 index a2a81c5..0000000 --- a/packages/mcp-server/src/tools/files/delete-files.ts +++ /dev/null @@ -1,40 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'files', - operation: 'write', - tags: [], - httpMethod: 'delete', - httpPath: '/v1/files/{id}', - operationId: 'deleteFile', -}; - -export const tool: Tool = { - name: 'delete_files', - description: 'Deletes a specific file by its ID.', - inputSchema: { - type: 'object', - properties: { - id: { - type: 'string', - }, - }, - required: ['id'], - }, - annotations: { - idempotentHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const { id, ...body } = args as any; - const response = await client.files.delete(id).asResponse(); - return asTextContentResult(await response.text()); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/files/download-files.ts b/packages/mcp-server/src/tools/files/download-files.ts deleted file mode 100644 index 5762e17..0000000 --- a/packages/mcp-server/src/tools/files/download-files.ts +++ /dev/null @@ -1,39 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asBinaryContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'files', - operation: 'read', - tags: [], - httpMethod: 'get', - httpPath: '/v1/files/{id}/download', - operationId: 'downloadFile', -}; - -export const tool: Tool = { - name: 'download_files', - description: 'Downloads the content of a specific file by its ID.', - inputSchema: { - type: 'object', - properties: { - id: { - type: 'string', - }, - }, - required: ['id'], - }, - annotations: { - readOnlyHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const { id, ...body } = args as any; - return asBinaryContentResult(await client.files.download(id).asResponse()); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/files/list-files.ts b/packages/mcp-server/src/tools/files/list-files.ts deleted file mode 100644 index 999f472..0000000 --- a/packages/mcp-server/src/tools/files/list-files.ts +++ /dev/null @@ -1,59 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { isJqError, maybeFilter } from 'scan-documents-mcp/filtering'; -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'files', - operation: 'read', - tags: [], - httpMethod: 'get', - httpPath: '/v1/files', - operationId: 'listFiles', -}; - -export const tool: Tool = { - name: 'list_files', - description: - "When using this tool, always use the `jq_filter` parameter to reduce the response size and improve performance.\n\nOnly omit if you're sure you don't need the data.\n\nRetrieves a paginated list of files belonging to the authenticated user.\n\n# Response Schema\n```json\n{\n $ref: '#/$defs/file_list_response',\n $defs: {\n file_list_response: {\n type: 'object',\n properties: {\n data: {\n type: 'array',\n description: 'The list of files',\n items: {\n $ref: '#/$defs/file'\n }\n },\n links: {\n type: 'object',\n properties: {\n next: {\n type: 'string',\n description: 'The URL to the next page of results'\n },\n previous: {\n type: 'string',\n description: 'The URL to the previous page of results'\n }\n },\n required: [ 'next',\n 'previous'\n ]\n }\n },\n required: [ 'data',\n 'links'\n ]\n },\n file: {\n anyOf: [ {\n type: 'object',\n title: 'Image Response',\n description: 'The response for an image file',\n properties: {\n id: {\n type: 'string',\n description: 'The id of the file'\n },\n created_at: {\n type: 'string',\n description: 'The creation date of the file in ISO format'\n },\n name: {\n type: 'string',\n description: 'The name of the file'\n },\n properties: {\n type: 'object',\n properties: {\n height: {\n type: 'number',\n description: 'The height of the image in pixels'\n },\n size: {\n type: 'number',\n description: 'The size of the image in bytes'\n },\n width: {\n type: 'number',\n description: 'The width of the image in pixels'\n }\n },\n required: [ 'height',\n 'size',\n 'width'\n ]\n },\n task_id: {\n type: 'string',\n description: 'The id of the task that generated this file, if any'\n },\n type: {\n type: 'string',\n description: 'The MIME type of the file',\n enum: [ 'image/png',\n 'image/jpeg',\n 'image/webp'\n ]\n }\n },\n required: [ 'id',\n 'created_at',\n 'name',\n 'properties',\n 'task_id',\n 'type'\n ]\n },\n {\n type: 'object',\n title: 'Document Response',\n description: 'The response for a document file',\n properties: {\n id: {\n type: 'string',\n description: 'The id of the file'\n },\n created_at: {\n type: 'string',\n description: 'The creation date of the file in ISO format'\n },\n name: {\n type: 'string',\n description: 'The name of the file'\n },\n properties: {\n type: 'object',\n properties: {\n page_count: {\n type: 'number',\n description: 'The number of pages in the document'\n },\n size: {\n type: 'number',\n description: 'The size of the document in bytes'\n }\n },\n required: [ 'page_count',\n 'size'\n ]\n },\n task_id: {\n type: 'string',\n description: 'The id of the task that generated this file, if any'\n },\n type: {\n type: 'string',\n description: 'The MIME type of the file',\n enum: [ 'application/pdf'\n ]\n }\n },\n required: [ 'id',\n 'created_at',\n 'name',\n 'properties',\n 'task_id',\n 'type'\n ]\n }\n ],\n title: 'File Response',\n description: 'The response for a file. 
Properties depend on the file type.'\n }\n }\n}\n```", - inputSchema: { - type: 'object', - properties: { - from: { - type: 'string', - description: 'The id of the file from which to start the search', - }, - take: { - type: 'number', - description: 'The number of elements to retrieve', - }, - jq_filter: { - type: 'string', - title: 'jq Filter', - description: - 'A jq filter to apply to the response to include certain fields. Consult the output schema in the tool description to see the fields that are available.\n\nFor example: to include only the `name` field in every object of a results array, you can provide ".results[].name".\n\nFor more information, see the [jq documentation](https://jqlang.org/manual/).', - }, - }, - required: [], - }, - annotations: { - readOnlyHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const { jq_filter, ...body } = args as any; - try { - return asTextContentResult(await maybeFilter(jq_filter, await client.files.list(body))); - } catch (error) { - if (error instanceof ScanDocuments.APIError || isJqError(error)) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/files/retrieve-files.ts b/packages/mcp-server/src/tools/files/retrieve-files.ts deleted file mode 100644 index c8cb762..0000000 --- a/packages/mcp-server/src/tools/files/retrieve-files.ts +++ /dev/null @@ -1,54 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { isJqError, maybeFilter } from 'scan-documents-mcp/filtering'; -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'files', - operation: 'read', - tags: [], - httpMethod: 'get', - httpPath: '/v1/files/{id}', - operationId: 'getFile', -}; - -export const tool: Tool = { - name: 'retrieve_files', - description: - "When using this tool, always use the `jq_filter` parameter to reduce the response size and improve performance.\n\nOnly omit if you're sure you don't need the data.\n\nRetrieves the data for a specific file by its ID.\n\n# Response Schema\n```json\n{\n $ref: '#/$defs/file',\n $defs: {\n file: {\n anyOf: [ {\n type: 'object',\n title: 'Image Response',\n description: 'The response for an image file',\n properties: {\n id: {\n type: 'string',\n description: 'The id of the file'\n },\n created_at: {\n type: 'string',\n description: 'The creation date of the file in ISO format'\n },\n name: {\n type: 'string',\n description: 'The name of the file'\n },\n properties: {\n type: 'object',\n properties: {\n height: {\n type: 'number',\n description: 'The height of the image in pixels'\n },\n size: {\n type: 'number',\n description: 'The size of the image in bytes'\n },\n width: {\n type: 'number',\n description: 'The width of the image in pixels'\n }\n },\n required: [ 'height',\n 'size',\n 'width'\n ]\n },\n task_id: {\n type: 'string',\n description: 'The id of the task that generated this file, if any'\n },\n type: {\n type: 'string',\n description: 'The MIME type of the file',\n enum: [ 'image/png',\n 'image/jpeg',\n 'image/webp'\n ]\n }\n },\n required: [ 'id',\n 'created_at',\n 'name',\n 'properties',\n 'task_id',\n 'type'\n ]\n },\n {\n type: 'object',\n title: 'Document Response',\n description: 'The response for a 
document file',\n properties: {\n id: {\n type: 'string',\n description: 'The id of the file'\n },\n created_at: {\n type: 'string',\n description: 'The creation date of the file in ISO format'\n },\n name: {\n type: 'string',\n description: 'The name of the file'\n },\n properties: {\n type: 'object',\n properties: {\n page_count: {\n type: 'number',\n description: 'The number of pages in the document'\n },\n size: {\n type: 'number',\n description: 'The size of the document in bytes'\n }\n },\n required: [ 'page_count',\n 'size'\n ]\n },\n task_id: {\n type: 'string',\n description: 'The id of the task that generated this file, if any'\n },\n type: {\n type: 'string',\n description: 'The MIME type of the file',\n enum: [ 'application/pdf'\n ]\n }\n },\n required: [ 'id',\n 'created_at',\n 'name',\n 'properties',\n 'task_id',\n 'type'\n ]\n }\n ],\n title: 'File Response',\n description: 'The response for a file. Properties depend on the file type.'\n }\n }\n}\n```", - inputSchema: { - type: 'object', - properties: { - id: { - type: 'string', - }, - jq_filter: { - type: 'string', - title: 'jq Filter', - description: - 'A jq filter to apply to the response to include certain fields. Consult the output schema in the tool description to see the fields that are available.\n\nFor example: to include only the `name` field in every object of a results array, you can provide ".results[].name".\n\nFor more information, see the [jq documentation](https://jqlang.org/manual/).', - }, - }, - required: ['id'], - }, - annotations: { - readOnlyHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const { id, jq_filter, ...body } = args as any; - try { - return asTextContentResult(await maybeFilter(jq_filter, await client.files.retrieve(id))); - } catch (error) { - if (error instanceof ScanDocuments.APIError || isJqError(error)) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/files/upload-files.ts b/packages/mcp-server/src/tools/files/upload-files.ts deleted file mode 100644 index 6759245..0000000 --- a/packages/mcp-server/src/tools/files/upload-files.ts +++ /dev/null @@ -1,57 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { isJqError, maybeFilter } from 'scan-documents-mcp/filtering'; -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'files', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/files', - operationId: 'uploadFile', -}; - -export const tool: Tool = { - name: 'upload_files', - description: - "When using this tool, always use the `jq_filter` parameter to reduce the response size and improve performance.\n\nOnly omit if you're sure you don't need the data.\n\nUploads a file to the user's storage. 
The file size is limited to 10MB.\n\n# Response Schema\n```json\n{\n $ref: '#/$defs/file',\n $defs: {\n file: {\n anyOf: [ {\n type: 'object',\n title: 'Image Response',\n description: 'The response for an image file',\n properties: {\n id: {\n type: 'string',\n description: 'The id of the file'\n },\n created_at: {\n type: 'string',\n description: 'The creation date of the file in ISO format'\n },\n name: {\n type: 'string',\n description: 'The name of the file'\n },\n properties: {\n type: 'object',\n properties: {\n height: {\n type: 'number',\n description: 'The height of the image in pixels'\n },\n size: {\n type: 'number',\n description: 'The size of the image in bytes'\n },\n width: {\n type: 'number',\n description: 'The width of the image in pixels'\n }\n },\n required: [ 'height',\n 'size',\n 'width'\n ]\n },\n task_id: {\n type: 'string',\n description: 'The id of the task that generated this file, if any'\n },\n type: {\n type: 'string',\n description: 'The MIME type of the file',\n enum: [ 'image/png',\n 'image/jpeg',\n 'image/webp'\n ]\n }\n },\n required: [ 'id',\n 'created_at',\n 'name',\n 'properties',\n 'task_id',\n 'type'\n ]\n },\n {\n type: 'object',\n title: 'Document Response',\n description: 'The response for a document file',\n properties: {\n id: {\n type: 'string',\n description: 'The id of the file'\n },\n created_at: {\n type: 'string',\n description: 'The creation date of the file in ISO format'\n },\n name: {\n type: 'string',\n description: 'The name of the file'\n },\n properties: {\n type: 'object',\n properties: {\n page_count: {\n type: 'number',\n description: 'The number of pages in the document'\n },\n size: {\n type: 'number',\n description: 'The size of the document in bytes'\n }\n },\n required: [ 'page_count',\n 'size'\n ]\n },\n task_id: {\n type: 'string',\n description: 'The id of the task that generated this file, if any'\n },\n type: {\n type: 'string',\n description: 'The MIME type of the file',\n enum: [ 'application/pdf'\n ]\n }\n },\n required: [ 'id',\n 'created_at',\n 'name',\n 'properties',\n 'task_id',\n 'type'\n ]\n }\n ],\n title: 'File Response',\n description: 'The response for a file. Properties depend on the file type.'\n }\n }\n}\n```", - inputSchema: { - type: 'object', - properties: { - file: { - type: 'string', - description: 'The file to upload', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - jq_filter: { - type: 'string', - title: 'jq Filter', - description: - 'A jq filter to apply to the response to include certain fields. 
Consult the output schema in the tool description to see the fields that are available.\n\nFor example: to include only the `name` field in every object of a results array, you can provide ".results[].name".\n\nFor more information, see the [jq documentation](https://jqlang.org/manual/).', - }, - }, - required: ['file', 'name'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const { jq_filter, ...body } = args as any; - try { - return asTextContentResult(await maybeFilter(jq_filter, await client.files.upload(body))); - } catch (error) { - if (error instanceof ScanDocuments.APIError || isJqError(error)) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/image-operations/apply-effect-image-operations.ts b/packages/mcp-server/src/tools/image-operations/apply-effect-image-operations.ts deleted file mode 100644 index ccab319..0000000 --- a/packages/mcp-server/src/tools/image-operations/apply-effect-image-operations.ts +++ /dev/null @@ -1,59 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'image_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/image-operations/apply-effect', - operationId: 'applyEffect', -}; - -export const tool: Tool = { - name: 'apply_effect_image_operations', - description: 'Creates a task to apply a specified visual effect to an image.', - inputSchema: { - type: 'object', - properties: { - effect: { - type: 'string', - description: 'The effect to apply to the image', - enum: ['grayscale', 'scanner', 'black-background'], - }, - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['effect', 'input'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.imageOperations.applyEffect(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/image-operations/convert-image-operations.ts b/packages/mcp-server/src/tools/image-operations/convert-image-operations.ts deleted file mode 100644 index 856917a..0000000 --- a/packages/mcp-server/src/tools/image-operations/convert-image-operations.ts +++ /dev/null @@ -1,120 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'image_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/image-operations/convert', - operationId: 'convertImage', -}; - -export const tool: Tool = { - name: 'convert_image_operations', - description: 'Creates a task to convert an image file to a different format.', - inputSchema: { - type: 'object', - anyOf: [ - { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - target_format: { - type: 'string', - description: 'The format to convert the image to.', - enum: ['image/png'], - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input', 'target_format'], - }, - { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - quality: { - type: 'number', - description: 'Image quality (1-100) for lossy formats like jpeg.', - }, - target_format: { - type: 'string', - description: 'The format to convert the image to.', - enum: ['image/jpeg'], - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input', 'quality', 'target_format'], - }, - { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - quality: { - type: 'number', - description: 'Image quality (1-100) for lossy formats like webp.', - }, - target_format: { - type: 'string', - description: 'The format to convert the image to.', - enum: ['image/webp'], - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input', 'quality', 'target_format'], - }, - ], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.imageOperations.convert(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/image-operations/detect-documents-image-operations.ts b/packages/mcp-server/src/tools/image-operations/detect-documents-image-operations.ts deleted file mode 100644 index 4424a00..0000000 --- a/packages/mcp-server/src/tools/image-operations/detect-documents-image-operations.ts +++ /dev/null @@ -1,50 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'image_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/image-operations/detect-documents', - operationId: 'detectDocuments', -}; - -export const tool: Tool = { - name: 'detect_documents_image_operations', - description: 'Creates a task to detect document boundaries within an image.', - inputSchema: { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - }, - required: ['input'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.imageOperations.detectDocuments(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/image-operations/extract-text-image-operations.ts b/packages/mcp-server/src/tools/image-operations/extract-text-image-operations.ts deleted file mode 100644 index fe29047..0000000 --- a/packages/mcp-server/src/tools/image-operations/extract-text-image-operations.ts +++ /dev/null @@ -1,159 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'image_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/image-operations/extract-text', - operationId: 'extractText', -}; - -export const tool: Tool = { - name: 'extract_text_image_operations', - description: 'Creates a task to extract text from a specified image file.', - inputSchema: { - type: 'object', - anyOf: [ - { - type: 'object', - properties: { - format: { - type: 'string', - description: 'The format of the text to be extracted.', - enum: ['plain'], - }, - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - }, - required: ['format', 'input'], - }, - { - type: 'object', - properties: { - format: { - type: 'string', - description: 'The format of the text to be extracted.', - enum: ['markdown'], - }, - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. 
If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - }, - required: ['format', 'input'], - }, - { - type: 'object', - properties: { - format: { - type: 'string', - description: 'The format of the text to be extracted.', - enum: ['html'], - }, - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - }, - required: ['format', 'input'], - }, - { - type: 'object', - properties: { - format: { - type: 'string', - description: 'The format of the text to be extracted.', - enum: ['json'], - }, - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - schema: { - $ref: '#/$defs/json_schema_spec', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - }, - required: ['format', 'input', 'schema'], - }, - ], - $defs: { - json_schema_spec: { - type: 'object', - description: - "An OpenAPI schema object describing the expected JSON structure. Required if format is 'json'.", - properties: { - description: { - type: 'string', - }, - example: { - type: 'object', - additionalProperties: true, - }, - format: { - type: 'string', - }, - items: { - $ref: '#/$defs/json_schema_spec', - }, - properties: { - type: 'object', - additionalProperties: true, - }, - required: { - type: 'array', - items: { - type: 'string', - }, - }, - type: { - type: 'string', - enum: ['string', 'number', 'integer', 'boolean', 'array', 'object'], - }, - }, - }, - }, - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.imageOperations.extractText(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/image-operations/scan-image-operations.ts b/packages/mcp-server/src/tools/image-operations/scan-image-operations.ts deleted file mode 100644 index 83b32b1..0000000 --- a/packages/mcp-server/src/tools/image-operations/scan-image-operations.ts +++ /dev/null @@ -1,66 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'image_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/image-operations/scan', - operationId: 'scanImage', -}; - -export const tool: Tool = { - name: 'scan_image_operations', - description: - 'Creates a task to scan an image file. 
\nThis is an equivalent operation for `detect-documents` and `warp` combined, additionally it can apply effects to the scanned image.', - inputSchema: { - type: 'object', - properties: { - effect: { - type: 'string', - description: 'The effect to apply to the image', - enum: ['none', 'grayscale', 'scanner', 'black-background'], - }, - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - scan_mode: { - type: 'string', - description: - "Mode for detecting documents in the image. Available modes are:\n- **none**: No document detection is performed.\n- **standard**: Using a quick algorithm. Document is detected in the image, and the image is cropped to the detected document area fixing the perspective to match the document's shape.", - enum: ['none', 'standard'], - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['effect', 'input', 'scan_mode'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.imageOperations.scan(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/image-operations/warp-image-operations.ts b/packages/mcp-server/src/tools/image-operations/warp-image-operations.ts deleted file mode 100644 index c9b7271..0000000 --- a/packages/mcp-server/src/tools/image-operations/warp-image-operations.ts +++ /dev/null @@ -1,63 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'image_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/image-operations/warp', - operationId: 'warpImage', -}; - -export const tool: Tool = { - name: 'warp_image_operations', - description: - 'Creates a task to apply perspective correction (warp) to an image based on detected document boundaries.', - inputSchema: { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - vertices: { - type: 'array', - description: 'Coordinates of the 4 vertices of the quadrilateral to warp the image to.', - items: { - type: 'object', - additionalProperties: true, - }, - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. 
If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input', 'vertices'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.imageOperations.warp(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/index.ts b/packages/mcp-server/src/tools/index.ts deleted file mode 100644 index f862226..0000000 --- a/packages/mcp-server/src/tools/index.ts +++ /dev/null @@ -1,105 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, Endpoint, HandlerFunction } from './types'; - -export { Metadata, Endpoint, HandlerFunction }; - -import retrieve_files from './files/retrieve-files'; -import list_files from './files/list-files'; -import delete_files from './files/delete-files'; -import download_files from './files/download-files'; -import upload_files from './files/upload-files'; -import retrieve_tasks from './tasks/retrieve-tasks'; -import list_tasks from './tasks/list-tasks'; -import list_events from './events/list-events'; -import apply_effect_image_operations from './image-operations/apply-effect-image-operations'; -import convert_image_operations from './image-operations/convert-image-operations'; -import detect_documents_image_operations from './image-operations/detect-documents-image-operations'; -import extract_text_image_operations from './image-operations/extract-text-image-operations'; -import scan_image_operations from './image-operations/scan-image-operations'; -import warp_image_operations from './image-operations/warp-image-operations'; -import extract_pages_pdf_operations from './pdf-operations/extract-pages-pdf-operations'; -import merge_pdf_operations from './pdf-operations/merge-pdf-operations'; -import render_pdf_operations from './pdf-operations/render-pdf-operations'; -import split_pdf_operations from './pdf-operations/split-pdf-operations'; - -export const endpoints: Endpoint[] = []; - -function addEndpoint(endpoint: Endpoint) { - endpoints.push(endpoint); -} - -addEndpoint(retrieve_files); -addEndpoint(list_files); -addEndpoint(delete_files); -addEndpoint(download_files); -addEndpoint(upload_files); -addEndpoint(retrieve_tasks); -addEndpoint(list_tasks); -addEndpoint(list_events); -addEndpoint(apply_effect_image_operations); -addEndpoint(convert_image_operations); -addEndpoint(detect_documents_image_operations); -addEndpoint(extract_text_image_operations); -addEndpoint(scan_image_operations); -addEndpoint(warp_image_operations); -addEndpoint(extract_pages_pdf_operations); -addEndpoint(merge_pdf_operations); -addEndpoint(render_pdf_operations); -addEndpoint(split_pdf_operations); - -export type Filter = { - type: 'resource' | 'operation' | 'tag' | 'tool'; - op: 'include' | 'exclude'; - value: string; -}; - -export function query(filters: Filter[], endpoints: Endpoint[]): Endpoint[] { - const allExcludes = filters.length > 0 && filters.every((filter) => filter.op === 'exclude'); - const unmatchedFilters = new Set(filters); - - const filtered = endpoints.filter((endpoint: Endpoint) => { - let included = false || allExcludes; - - for (const filter of filters) { - if (match(filter, 
endpoint)) { - unmatchedFilters.delete(filter); - included = filter.op === 'include'; - } - } - - return included; - }); - - // Check if any filters didn't match - const unmatched = Array.from(unmatchedFilters).filter((f) => f.type === 'tool' || f.type === 'resource'); - if (unmatched.length > 0) { - throw new Error( - `The following filters did not match any endpoints: ${unmatched - .map((f) => `${f.type}=${f.value}`) - .join(', ')}`, - ); - } - - return filtered; -} - -function match({ type, value }: Filter, endpoint: Endpoint): boolean { - switch (type) { - case 'resource': { - const regexStr = '^' + normalizeResource(value).replace(/\*/g, '.*') + '$'; - const regex = new RegExp(regexStr); - return regex.test(normalizeResource(endpoint.metadata.resource)); - } - case 'operation': - return endpoint.metadata.operation === value; - case 'tag': - return endpoint.metadata.tags.includes(value); - case 'tool': - return endpoint.tool.name === value; - } -} - -function normalizeResource(resource: string): string { - return resource.toLowerCase().replace(/[^a-z.*\-_]*/g, ''); -} diff --git a/packages/mcp-server/src/tools/pdf-operations/extract-pages-pdf-operations.ts b/packages/mcp-server/src/tools/pdf-operations/extract-pages-pdf-operations.ts deleted file mode 100644 index ee7c8c9..0000000 --- a/packages/mcp-server/src/tools/pdf-operations/extract-pages-pdf-operations.ts +++ /dev/null @@ -1,58 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'pdf_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/pdf-operations/extract-pages', - operationId: 'extractPdfPages', -}; - -export const tool: Tool = { - name: 'extract_pages_pdf_operations', - description: 'Creates a task to extract specific pages from a PDF file into a new PDF file.', - inputSchema: { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - pages: { - type: 'string', - description: 'Page range (e.g., 2-7), a comma-separated list (e.g., 2,3,7) of pages.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input', 'pages'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.pdfOperations.extractPages(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/pdf-operations/merge-pdf-operations.ts b/packages/mcp-server/src/tools/pdf-operations/merge-pdf-operations.ts deleted file mode 100644 index d12fc62..0000000 --- a/packages/mcp-server/src/tools/pdf-operations/merge-pdf-operations.ts +++ /dev/null @@ -1,58 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'pdf_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/pdf-operations/merge', - operationId: 'mergePdf', -}; - -export const tool: Tool = { - name: 'merge_pdf_operations', - description: 'Creates a task to merge multiple PDF and/or image files into a single PDF file.', - inputSchema: { - type: 'object', - properties: { - input: { - type: 'array', - description: 'The list of ids of the files to be merged', - items: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.pdfOperations.merge(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/pdf-operations/render-pdf-operations.ts b/packages/mcp-server/src/tools/pdf-operations/render-pdf-operations.ts deleted file mode 100644 index a173901..0000000 --- a/packages/mcp-server/src/tools/pdf-operations/render-pdf-operations.ts +++ /dev/null @@ -1,62 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'pdf_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/pdf-operations/render', - operationId: 'renderPdf', -}; - -export const tool: Tool = { - name: 'render_pdf_operations', - description: 'Creates a task to render specified pages of a PDF file as images.', - inputSchema: { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - dpi: { - type: 'integer', - description: 'Dots per inch (DPI) for the rendered image. 
Default is 300.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - pages: { - type: 'string', - description: 'Page range (e.g., 2-7), a comma-separated list (e.g., 2,3,7) of pages.', - }, - }, - required: ['input'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.pdfOperations.render(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/pdf-operations/split-pdf-operations.ts b/packages/mcp-server/src/tools/pdf-operations/split-pdf-operations.ts deleted file mode 100644 index a33db6c..0000000 --- a/packages/mcp-server/src/tools/pdf-operations/split-pdf-operations.ts +++ /dev/null @@ -1,54 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'pdf_operations', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v1/pdf-operations/split', - operationId: 'splitPdf', -}; - -export const tool: Tool = { - name: 'split_pdf_operations', - description: 'Creates a task to split a PDF file into multiple single-page PDF files.', - inputSchema: { - type: 'object', - properties: { - input: { - type: 'string', - description: 'The id of the file or task to operate on.', - }, - callback_url: { - type: 'string', - description: - 'The URL to call when the task is completed or failed. If you want to receive events, you probably prefer to use `webhooks` instead.', - }, - name: { - type: 'string', - description: 'The name of the file', - }, - }, - required: ['input'], - }, - annotations: {}, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.pdfOperations.split(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/tasks/list-tasks.ts b/packages/mcp-server/src/tools/tasks/list-tasks.ts deleted file mode 100644 index 924d088..0000000 --- a/packages/mcp-server/src/tools/tasks/list-tasks.ts +++ /dev/null @@ -1,49 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'tasks', - operation: 'read', - tags: [], - httpMethod: 'get', - httpPath: '/v1/tasks', - operationId: 'listTasks', -}; - -export const tool: Tool = { - name: 'list_tasks', - description: 'Retrieves a paginated list of tasks belonging to the authenticated user.', - inputSchema: { - type: 'object', - properties: { - from: { - type: 'string', - }, - take: { - type: 'number', - }, - }, - required: [], - }, - annotations: { - readOnlyHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const body = args as any; - try { - return asTextContentResult(await client.tasks.list(body)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/tasks/retrieve-tasks.ts b/packages/mcp-server/src/tools/tasks/retrieve-tasks.ts deleted file mode 100644 index 4bac651..0000000 --- a/packages/mcp-server/src/tools/tasks/retrieve-tasks.ts +++ /dev/null @@ -1,47 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asErrorResult, asTextContentResult } from 'scan-documents-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import ScanDocuments from 'scan-documents'; - -export const metadata: Metadata = { - resource: 'tasks', - operation: 'read', - tags: [], - httpMethod: 'get', - httpPath: '/v1/tasks/{id}', - operationId: 'getTask', -}; - -export const tool: Tool = { - name: 'retrieve_tasks', - description: 'Retrieves the data for a specific task by its ID.', - inputSchema: { - type: 'object', - properties: { - id: { - type: 'string', - description: 'The id of the task to get.', - }, - }, - required: ['id'], - }, - annotations: { - readOnlyHint: true, - }, -}; - -export const handler = async (client: ScanDocuments, args: Record | undefined) => { - const { id, ...body } = args as any; - try { - return asTextContentResult(await client.tasks.retrieve(id)); - } catch (error) { - if (error instanceof ScanDocuments.APIError) { - return asErrorResult(error.message); - } - throw error; - } -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/types.ts b/packages/mcp-server/src/types.ts similarity index 98% rename from packages/mcp-server/src/tools/types.ts rename to packages/mcp-server/src/types.ts index 119bb90..651bbc1 100644 --- a/packages/mcp-server/src/tools/types.ts +++ b/packages/mcp-server/src/types.ts @@ -108,7 +108,7 @@ export type Metadata = { operationId?: string; }; -export type Endpoint = { +export type McpTool = { metadata: Metadata; tool: Tool; handler: HandlerFunction; diff --git a/packages/mcp-server/tests/compat.test.ts b/packages/mcp-server/tests/compat.test.ts deleted file mode 100644 index d6272f6..0000000 --- a/packages/mcp-server/tests/compat.test.ts +++ /dev/null @@ -1,1166 +0,0 @@ -import { - truncateToolNames, - removeTopLevelUnions, - removeAnyOf, - inlineRefs, - applyCompatibilityTransformations, - removeFormats, - findUsedDefs, -} from '../src/compat'; -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import { JSONSchema } from '../src/compat'; -import { Endpoint } from 
'../src/tools'; - -describe('truncateToolNames', () => { - it('should return original names when maxLength is 0 or negative', () => { - const names = ['tool1', 'tool2', 'tool3']; - expect(truncateToolNames(names, 0)).toEqual(new Map()); - expect(truncateToolNames(names, -1)).toEqual(new Map()); - }); - - it('should return original names when all names are shorter than maxLength', () => { - const names = ['tool1', 'tool2', 'tool3']; - expect(truncateToolNames(names, 10)).toEqual(new Map()); - }); - - it('should truncate names longer than maxLength', () => { - const names = ['very-long-tool-name', 'another-long-tool-name', 'short']; - expect(truncateToolNames(names, 10)).toEqual( - new Map([ - ['very-long-tool-name', 'very-long-'], - ['another-long-tool-name', 'another-lo'], - ]), - ); - }); - - it('should handle duplicate truncated names by appending numbers', () => { - const names = ['tool-name-a', 'tool-name-b', 'tool-name-c']; - expect(truncateToolNames(names, 8)).toEqual( - new Map([ - ['tool-name-a', 'tool-na1'], - ['tool-name-b', 'tool-na2'], - ['tool-name-c', 'tool-na3'], - ]), - ); - }); -}); - -describe('removeTopLevelUnions', () => { - const createTestTool = (overrides = {}): Tool => ({ - name: 'test-tool', - description: 'Test tool', - inputSchema: { - type: 'object', - properties: {}, - }, - ...overrides, - }); - - it('should return the original tool if it has no anyOf at the top level', () => { - const tool = createTestTool({ - inputSchema: { - type: 'object', - properties: { - foo: { type: 'string' }, - }, - }, - }); - - expect(removeTopLevelUnions(tool)).toEqual([tool]); - }); - - it('should split a tool with top-level anyOf into multiple tools', () => { - const tool = createTestTool({ - name: 'union-tool', - description: 'A tool with unions', - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - title: 'first variant', - description: 'Its the first variant', - properties: { - variant1: { type: 'string' }, - }, - required: ['variant1'], - }, - { - title: 'second variant', - properties: { - variant2: { type: 'number' }, - }, - required: ['variant2'], - }, - ], - }, - }); - - const result = removeTopLevelUnions(tool); - - expect(result).toEqual([ - { - name: 'union-tool_first_variant', - description: 'Its the first variant', - inputSchema: { - type: 'object', - title: 'first variant', - description: 'Its the first variant', - properties: { - common: { type: 'string' }, - variant1: { type: 'string' }, - }, - required: ['variant1'], - }, - }, - { - name: 'union-tool_second_variant', - description: 'A tool with unions', - inputSchema: { - type: 'object', - title: 'second variant', - description: 'A tool with unions', - properties: { - common: { type: 'string' }, - variant2: { type: 'number' }, - }, - required: ['variant2'], - }, - }, - ]); - }); - - it('should handle $defs and only include those used by the variant', () => { - const tool = createTestTool({ - name: 'defs-tool', - description: 'A tool with $defs', - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - $defs: { - def1: { type: 'string', format: 'email' }, - def2: { type: 'number', minimum: 0 }, - unused: { type: 'boolean' }, - }, - anyOf: [ - { - properties: { - email: { $ref: '#/$defs/def1' }, - }, - }, - { - properties: { - count: { $ref: '#/$defs/def2' }, - }, - }, - ], - }, - }); - - const result = removeTopLevelUnions(tool); - - expect(result).toEqual([ - { - name: 'defs-tool_variant1', - description: 'A tool with $defs', - 
inputSchema: { - type: 'object', - description: 'A tool with $defs', - properties: { - common: { type: 'string' }, - email: { $ref: '#/$defs/def1' }, - }, - $defs: { - def1: { type: 'string', format: 'email' }, - }, - }, - }, - { - name: 'defs-tool_variant2', - description: 'A tool with $defs', - inputSchema: { - type: 'object', - description: 'A tool with $defs', - properties: { - common: { type: 'string' }, - count: { $ref: '#/$defs/def2' }, - }, - $defs: { - def2: { type: 'number', minimum: 0 }, - }, - }, - }, - ]); - }); -}); - -describe('removeAnyOf', () => { - it('should return original schema if it has no anyOf', () => { - const schema = { - type: 'object', - properties: { - foo: { type: 'string' }, - bar: { type: 'number' }, - }, - }; - - expect(removeAnyOf(schema)).toEqual(schema); - }); - - it('should remove anyOf field and use the first variant', () => { - const schema = { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - properties: { - variant1: { type: 'string' }, - }, - required: ['variant1'], - }, - { - properties: { - variant2: { type: 'number' }, - }, - required: ['variant2'], - }, - ], - }; - - const expected = { - type: 'object', - properties: { - common: { type: 'string' }, - variant1: { type: 'string' }, - }, - required: ['variant1'], - }; - - expect(removeAnyOf(schema)).toEqual(expected); - }); - - it('should recursively remove anyOf fields from nested properties', () => { - const schema = { - type: 'object', - properties: { - foo: { type: 'string' }, - nested: { - type: 'object', - properties: { - bar: { type: 'number' }, - }, - anyOf: [ - { - properties: { - option1: { type: 'boolean' }, - }, - }, - { - properties: { - option2: { type: 'array' }, - }, - }, - ], - }, - }, - }; - - const expected = { - type: 'object', - properties: { - foo: { type: 'string' }, - nested: { - type: 'object', - properties: { - bar: { type: 'number' }, - option1: { type: 'boolean' }, - }, - }, - }, - }; - - expect(removeAnyOf(schema)).toEqual(expected); - }); - - it('should handle arrays', () => { - const schema = { - type: 'object', - properties: { - items: { - type: 'array', - items: { - anyOf: [{ type: 'string' }, { type: 'number' }], - }, - }, - }, - }; - - const expected = { - type: 'object', - properties: { - items: { - type: 'array', - items: { - type: 'string', - }, - }, - }, - }; - - expect(removeAnyOf(schema)).toEqual(expected); - }); -}); - -describe('findUsedDefs', () => { - it('should handle circular references without stack overflow', () => { - const defs = { - person: { - type: 'object', - properties: { - name: { type: 'string' }, - friend: { $ref: '#/$defs/person' }, // Circular reference - }, - }, - }; - - const schema = { - type: 'object', - properties: { - user: { $ref: '#/$defs/person' }, - }, - }; - - // This should not throw a stack overflow error - expect(() => { - const result = findUsedDefs(schema, defs); - expect(result).toHaveProperty('person'); - }).not.toThrow(); - }); - - it('should handle indirect circular references without stack overflow', () => { - const defs = { - node: { - type: 'object', - properties: { - value: { type: 'string' }, - child: { $ref: '#/$defs/childNode' }, - }, - }, - childNode: { - type: 'object', - properties: { - value: { type: 'string' }, - parent: { $ref: '#/$defs/node' }, // Indirect circular reference - }, - }, - }; - - const schema = { - type: 'object', - properties: { - root: { $ref: '#/$defs/node' }, - }, - }; - - // This should not throw a stack overflow error - expect(() => { - const 
result = findUsedDefs(schema, defs); - expect(result).toHaveProperty('node'); - expect(result).toHaveProperty('childNode'); - }).not.toThrow(); - }); - - it('should find all used definitions in non-circular schemas', () => { - const defs = { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - address: { $ref: '#/$defs/address' }, - }, - }, - address: { - type: 'object', - properties: { - street: { type: 'string' }, - city: { type: 'string' }, - }, - }, - unused: { - type: 'object', - properties: { - data: { type: 'string' }, - }, - }, - }; - - const schema = { - type: 'object', - properties: { - person: { $ref: '#/$defs/user' }, - }, - }; - - const result = findUsedDefs(schema, defs); - expect(result).toHaveProperty('user'); - expect(result).toHaveProperty('address'); - expect(result).not.toHaveProperty('unused'); - }); -}); - -describe('inlineRefs', () => { - it('should return the original schema if it does not contain $refs', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - name: { type: 'string' }, - age: { type: 'number' }, - }, - }; - - expect(inlineRefs(schema)).toEqual(schema); - }); - - it('should inline simple $refs', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - user: { $ref: '#/$defs/user' }, - }, - $defs: { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should inline nested $refs', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - order: { $ref: '#/$defs/order' }, - }, - $defs: { - order: { - type: 'object', - properties: { - id: { type: 'string' }, - items: { type: 'array', items: { $ref: '#/$defs/item' } }, - }, - }, - item: { - type: 'object', - properties: { - product: { type: 'string' }, - quantity: { type: 'integer' }, - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - order: { - type: 'object', - properties: { - id: { type: 'string' }, - items: { - type: 'array', - items: { - type: 'object', - properties: { - product: { type: 'string' }, - quantity: { type: 'integer' }, - }, - }, - }, - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should handle circular references by removing the circular part', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - person: { $ref: '#/$defs/person' }, - }, - $defs: { - person: { - type: 'object', - properties: { - name: { type: 'string' }, - friend: { $ref: '#/$defs/person' }, // Circular reference - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - person: { - type: 'object', - properties: { - name: { type: 'string' }, - // friend property is removed to break the circular reference - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should handle indirect circular references', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - node: { $ref: '#/$defs/node' }, - }, - $defs: { - node: { - type: 'object', - properties: { - value: { type: 'string' }, - child: { $ref: '#/$defs/childNode' }, - }, - }, - childNode: { - type: 'object', - properties: { - value: { type: 'string' }, - parent: { $ref: '#/$defs/node' }, // Circular 
reference through childNode - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - node: { - type: 'object', - properties: { - value: { type: 'string' }, - child: { - type: 'object', - properties: { - value: { type: 'string' }, - // parent property is removed to break the circular reference - }, - }, - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should preserve other properties when inlining references', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - address: { $ref: '#/$defs/address', description: 'User address' }, - }, - $defs: { - address: { - type: 'object', - properties: { - street: { type: 'string' }, - city: { type: 'string' }, - }, - required: ['street'], - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - address: { - type: 'object', - description: 'User address', - properties: { - street: { type: 'string' }, - city: { type: 'string' }, - }, - required: ['street'], - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); -}); - -describe('removeFormats', () => { - it('should return original schema if formats capability is true', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field', format: 'date' }, - email: { type: 'string', description: 'An email field', format: 'email' }, - }, - }; - - expect(removeFormats(schema, true)).toEqual(schema); - }); - - it('should move format to description when formats capability is false', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field', format: 'date' }, - email: { type: 'string', description: 'An email field', format: 'email' }, - }, - }; - - const expected = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field (format: "date")' }, - email: { type: 'string', description: 'An email field (format: "email")' }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle properties without description', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', format: 'date' }, - }, - }; - - const expected = { - type: 'object', - properties: { - date: { type: 'string', description: '(format: "date")' }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle nested properties', () => { - const schema = { - type: 'object', - properties: { - user: { - type: 'object', - properties: { - created_at: { type: 'string', description: 'Creation date', format: 'date-time' }, - }, - }, - }, - }; - - const expected = { - type: 'object', - properties: { - user: { - type: 'object', - properties: { - created_at: { type: 'string', description: 'Creation date (format: "date-time")' }, - }, - }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle arrays of objects', () => { - const schema = { - type: 'object', - properties: { - dates: { - type: 'array', - items: { - type: 'object', - properties: { - start: { type: 'string', description: 'Start date', format: 'date' }, - end: { type: 'string', description: 'End date', format: 'date' }, - }, - }, - }, - }, - }; - - const expected = { - type: 'object', - properties: { - dates: { - type: 'array', - items: { - type: 'object', - properties: { - start: { type: 'string', description: 'Start date (format: "date")' }, - end: { type: 'string', description: 'End date 
(format: "date")' }, - }, - }, - }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle schemas with $defs', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field', format: 'date' }, - }, - $defs: { - timestamp: { - type: 'string', - description: 'A timestamp field', - format: 'date-time', - }, - }, - }; - - const expected = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field (format: "date")' }, - }, - $defs: { - timestamp: { - type: 'string', - description: 'A timestamp field (format: "date-time")', - }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); -}); - -describe('applyCompatibilityTransformations', () => { - const createTestTool = (name: string, overrides = {}): Tool => ({ - name, - description: 'Test tool', - inputSchema: { - type: 'object', - properties: {}, - }, - ...overrides, - }); - - const createTestEndpoint = (tool: Tool): Endpoint => ({ - tool, - handler: jest.fn(), - metadata: { - resource: 'test', - operation: 'read' as const, - tags: [], - }, - }); - - it('should not modify endpoints when all capabilities are enabled', () => { - const tool = createTestTool('test-tool'); - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed).toEqual(endpoints); - }); - - it('should split tools with top-level unions when topLevelUnions is disabled', () => { - const tool = createTestTool('union-tool', { - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - title: 'first variant', - properties: { - variant1: { type: 'string' }, - }, - }, - { - title: 'second variant', - properties: { - variant2: { type: 'number' }, - }, - }, - ], - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed.length).toBe(2); - expect(transformed[0]!.tool.name).toBe('union-tool_first_variant'); - expect(transformed[1]!.tool.name).toBe('union-tool_second_variant'); - }); - - it('should handle variants without titles in removeTopLevelUnions', () => { - const tool = createTestTool('union-tool', { - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - properties: { - variant1: { type: 'string' }, - }, - }, - { - properties: { - variant2: { type: 'number' }, - }, - }, - ], - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed.length).toBe(2); - expect(transformed[0]!.tool.name).toBe('union-tool_variant1'); - expect(transformed[1]!.tool.name).toBe('union-tool_variant2'); - }); - - it('should truncate tool names when toolNameLength is set', () => { - const tools = [ - createTestTool('very-long-tool-name-that-exceeds-limit'), - createTestTool('another-long-tool-name-to-truncate'), - 
createTestTool('short-name'), - ]; - - const endpoints = tools.map(createTestEndpoint); - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: 20, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed[0]!.tool.name).toBe('very-long-tool-name-'); - expect(transformed[1]!.tool.name).toBe('another-long-tool-na'); - expect(transformed[2]!.tool.name).toBe('short-name'); - }); - - it('should inline refs when refs capability is disabled', () => { - const tool = createTestTool('ref-tool', { - inputSchema: { - type: 'object', - properties: { - user: { $ref: '#/$defs/user' }, - }, - $defs: { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: false, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - expect(schema.$defs).toBeUndefined(); - - if (schema.properties) { - expect(schema.properties['user']).toEqual({ - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }); - } - }); - - it('should preserve external refs when inlining', () => { - const tool = createTestTool('ref-tool', { - inputSchema: { - type: 'object', - properties: { - internal: { $ref: '#/$defs/internal' }, - external: { $ref: 'https://example.com/schemas/external.json' }, - }, - $defs: { - internal: { - type: 'object', - properties: { - name: { type: 'string' }, - }, - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: false, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - if (schema.properties) { - expect(schema.properties['internal']).toEqual({ - type: 'object', - properties: { - name: { type: 'string' }, - }, - }); - expect(schema.properties['external']).toEqual({ - $ref: 'https://example.com/schemas/external.json', - }); - } - }); - - it('should remove anyOf fields when unions capability is disabled', () => { - const tool = createTestTool('union-tool', { - inputSchema: { - type: 'object', - properties: { - field: { - anyOf: [{ type: 'string' }, { type: 'number' }], - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: false, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - if (schema.properties && schema.properties['field']) { - const field = schema.properties['field']; - expect(field.anyOf).toBeUndefined(); - expect(field.type).toBe('string'); - } - }); - - it('should correctly combine topLevelUnions and toolNameLength transformations', () => { - const tool = createTestTool('very-long-union-tool-name', { - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - title: 'first variant', - properties: { - variant1: { type: 
'string' }, - }, - }, - { - title: 'second variant', - properties: { - variant2: { type: 'number' }, - }, - }, - ], - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: 20, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed.length).toBe(2); - - // Both names should be truncated because they exceed 20 characters - expect(transformed[0]!.tool.name).toBe('very-long-union-too1'); - expect(transformed[1]!.tool.name).toBe('very-long-union-too2'); - }); - - it('should correctly combine refs and unions transformations', () => { - const tool = createTestTool('complex-tool', { - inputSchema: { - type: 'object', - properties: { - user: { $ref: '#/$defs/user' }, - }, - $defs: { - user: { - type: 'object', - properties: { - preference: { - anyOf: [{ type: 'string' }, { type: 'number' }], - }, - }, - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: false, - unions: false, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - // Refs should be inlined - expect(schema.$defs).toBeUndefined(); - - // Safely access nested properties - if (schema.properties && schema.properties['user']) { - const user = schema.properties['user']; - // User should be inlined - expect(user.type).toBe('object'); - - // AnyOf in the inlined user.preference should be removed - if (user.properties && user.properties['preference']) { - const preference = user.properties['preference']; - expect(preference.anyOf).toBeUndefined(); - expect(preference.type).toBe('string'); - } - } - }); - - it('should handle formats capability being false', () => { - const tool = createTestTool('format-tool', { - inputSchema: { - type: 'object', - properties: { - date: { type: 'string', description: 'A date', format: 'date' }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: false, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - if (schema.properties && schema.properties['date']) { - const dateField = schema.properties['date']; - expect(dateField['format']).toBeUndefined(); - expect(dateField['description']).toBe('A date (format: "date")'); - } - }); -}); diff --git a/packages/mcp-server/tests/dynamic-tools.test.ts b/packages/mcp-server/tests/dynamic-tools.test.ts deleted file mode 100644 index 08963af..0000000 --- a/packages/mcp-server/tests/dynamic-tools.test.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { dynamicTools } from '../src/dynamic-tools'; -import { Endpoint } from '../src/tools'; - -describe('dynamicTools', () => { - const fakeClient = {} as any; - - const endpoints: Endpoint[] = [ - makeEndpoint('test_read_endpoint', 'test_resource', 'read', ['test']), - makeEndpoint('test_write_endpoint', 'test_resource', 'write', ['test']), - makeEndpoint('user_endpoint', 'user', 'read', ['user', 'admin']), - makeEndpoint('admin_endpoint', 'admin', 'write', ['admin']), - ]; - - const tools = dynamicTools(endpoints); - - const toolsMap = { - 
list_api_endpoints: toolOrError('list_api_endpoints'), - get_api_endpoint_schema: toolOrError('get_api_endpoint_schema'), - invoke_api_endpoint: toolOrError('invoke_api_endpoint'), - }; - - describe('list_api_endpoints', () => { - it('should return all endpoints when no search query is provided', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, {}); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(endpoints.length); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('test_read_endpoint'); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('test_write_endpoint'); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('user_endpoint'); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('admin_endpoint'); - }); - - it('should filter endpoints by name', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'user' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(1); - expect(result.tools[0].name).toBe('user_endpoint'); - }); - - it('should filter endpoints by resource', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'admin' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools.some((t: { resource: string }) => t.resource === 'admin')).toBeTruthy(); - }); - - it('should filter endpoints by tag', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'admin' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools.some((t: { tags: string[] }) => t.tags.includes('admin'))).toBeTruthy(); - }); - - it('should be case insensitive in search', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'ADMIN' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools.length).toBe(2); - result.tools.forEach((tool: { name: string; resource: string; tags: string[] }) => { - expect( - tool.name.toLowerCase().includes('admin') || - tool.resource.toLowerCase().includes('admin') || - tool.tags.some((tag: string) => tag.toLowerCase().includes('admin')), - ).toBeTruthy(); - }); - }); - - it('should filter endpoints by description', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { - search_query: 'Test endpoint for user_endpoint', - }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(1); - expect(result.tools[0].name).toBe('user_endpoint'); - expect(result.tools[0].description).toBe('Test endpoint for user_endpoint'); - }); - - it('should filter endpoints by partial description match', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { - search_query: 'endpoint for user', - }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(1); - expect(result.tools[0].name).toBe('user_endpoint'); - }); - }); - - describe('get_api_endpoint_schema', () => { - it('should return schema for existing endpoint', async () => { - const content = await toolsMap.get_api_endpoint_schema.handler(fakeClient, { - endpoint: 'test_read_endpoint', - }); - const result = JSON.parse(content.content[0].text); - - expect(result).toEqual(endpoints[0]?.tool); - }); - - it('should throw error for non-existent endpoint', async () => 
{ - await expect( - toolsMap.get_api_endpoint_schema.handler(fakeClient, { endpoint: 'non_existent_endpoint' }), - ).rejects.toThrow('Endpoint non_existent_endpoint not found'); - }); - - it('should throw error when no endpoint provided', async () => { - await expect(toolsMap.get_api_endpoint_schema.handler(fakeClient, undefined)).rejects.toThrow( - 'No endpoint provided', - ); - }); - }); - - describe('invoke_api_endpoint', () => { - it('should successfully invoke endpoint with valid arguments', async () => { - const mockHandler = endpoints[0]?.handler as jest.Mock; - mockHandler.mockClear(); - - await toolsMap.invoke_api_endpoint.handler(fakeClient, { - endpoint_name: 'test_read_endpoint', - args: { testParam: 'test value' }, - }); - - expect(mockHandler).toHaveBeenCalledWith(fakeClient, { testParam: 'test value' }); - }); - - it('should throw error for non-existent endpoint', async () => { - await expect( - toolsMap.invoke_api_endpoint.handler(fakeClient, { - endpoint_name: 'non_existent_endpoint', - args: { testParam: 'test value' }, - }), - ).rejects.toThrow(/Endpoint non_existent_endpoint not found/); - }); - - it('should throw error when no arguments provided', async () => { - await expect(toolsMap.invoke_api_endpoint.handler(fakeClient, undefined)).rejects.toThrow( - 'No endpoint provided', - ); - }); - - it('should throw error for invalid argument schema', async () => { - await expect( - toolsMap.invoke_api_endpoint.handler(fakeClient, { - endpoint_name: 'test_read_endpoint', - args: { wrongParam: 'test value' }, // Missing required testParam - }), - ).rejects.toThrow(/Invalid arguments for endpoint/); - }); - }); - - function toolOrError(name: string) { - const tool = tools.find((tool) => tool.tool.name === name); - if (!tool) throw new Error(`Tool ${name} not found`); - return tool; - } -}); - -function makeEndpoint( - name: string, - resource: string, - operation: 'read' | 'write', - tags: string[] = [], -): Endpoint { - return { - metadata: { - resource, - operation, - tags, - }, - tool: { - name, - description: `Test endpoint for ${name}`, - inputSchema: { - type: 'object', - properties: { - testParam: { type: 'string' }, - }, - required: ['testParam'], - }, - }, - handler: jest.fn().mockResolvedValue({ success: true }), - }; -} diff --git a/packages/mcp-server/tests/options.test.ts b/packages/mcp-server/tests/options.test.ts index 4d9b60c..532666a 100644 --- a/packages/mcp-server/tests/options.test.ts +++ b/packages/mcp-server/tests/options.test.ts @@ -1,6 +1,4 @@ import { parseCLIOptions, parseQueryOptions } from '../src/options'; -import { Filter } from '../src/tools'; -import { parseEmbeddedJSON } from '../src/compat'; // Mock process.argv const mockArgv = (args: string[]) => { @@ -12,338 +10,35 @@ const mockArgv = (args: string[]) => { }; describe('parseCLIOptions', () => { - it('should parse basic filter options', () => { - const cleanup = mockArgv([ - '--tool=test-tool', - '--resource=test-resource', - '--operation=read', - '--tag=test-tag', - ]); + it('default parsing should be stdio', () => { + const cleanup = mockArgv([]); const result = parseCLIOptions(); - expect(result.filters).toEqual([ - { type: 'tag', op: 'include', value: 'test-tag' }, - { type: 'resource', op: 'include', value: 'test-resource' }, - { type: 'tool', op: 'include', value: 'test-tool' }, - { type: 'operation', op: 'include', value: 'read' }, - ] as Filter[]); - - expect(result.capabilities).toEqual({}); - - expect(result.list).toBe(false); + expect(result.transport).toBe('stdio'); cleanup(); }); 
- it('should parse exclusion filters', () => { - const cleanup = mockArgv([ - '--no-tool=exclude-tool', - '--no-resource=exclude-resource', - '--no-operation=write', - '--no-tag=exclude-tag', - ]); + it('using http transport with a port', () => { + const cleanup = mockArgv(['--transport=http', '--port=2222']); const result = parseCLIOptions(); - expect(result.filters).toEqual([ - { type: 'tag', op: 'exclude', value: 'exclude-tag' }, - { type: 'resource', op: 'exclude', value: 'exclude-resource' }, - { type: 'tool', op: 'exclude', value: 'exclude-tool' }, - { type: 'operation', op: 'exclude', value: 'write' }, - ] as Filter[]); - - expect(result.capabilities).toEqual({}); - - cleanup(); - }); - - it('should parse client presets', () => { - const cleanup = mockArgv(['--client=openai-agents']); - - const result = parseCLIOptions(); - - expect(result.client).toEqual('openai-agents'); - - cleanup(); - }); - - it('should parse individual capabilities', () => { - const cleanup = mockArgv([ - '--capability=top-level-unions', - '--capability=valid-json', - '--capability=refs', - '--capability=unions', - '--capability=tool-name-length=40', - ]); - - const result = parseCLIOptions(); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - toolNameLength: 40, - }); - - cleanup(); - }); - - it('should handle list option', () => { - const cleanup = mockArgv(['--list']); - - const result = parseCLIOptions(); - - expect(result.list).toBe(true); - - cleanup(); - }); - - it('should handle multiple filters of the same type', () => { - const cleanup = mockArgv(['--tool=tool1', '--tool=tool2', '--resource=res1', '--resource=res2']); - - const result = parseCLIOptions(); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'res1' }, - { type: 'resource', op: 'include', value: 'res2' }, - { type: 'tool', op: 'include', value: 'tool1' }, - { type: 'tool', op: 'include', value: 'tool2' }, - ] as Filter[]); - - cleanup(); - }); - - it('should handle comma-separated values in array options', () => { - const cleanup = mockArgv([ - '--tool=tool1,tool2', - '--resource=res1,res2', - '--capability=top-level-unions,valid-json,unions', - ]); - - const result = parseCLIOptions(); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'res1' }, - { type: 'resource', op: 'include', value: 'res2' }, - { type: 'tool', op: 'include', value: 'tool1' }, - { type: 'tool', op: 'include', value: 'tool2' }, - ] as Filter[]); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - unions: true, - }); - - cleanup(); - }); - - it('should handle invalid tool-name-length format', () => { - const cleanup = mockArgv(['--capability=tool-name-length=invalid']); - - // Mock console.error to prevent output during test - const originalError = console.error; - console.error = jest.fn(); - - expect(() => parseCLIOptions()).toThrow(); - - console.error = originalError; - cleanup(); - }); - - it('should handle unknown capability', () => { - const cleanup = mockArgv(['--capability=unknown-capability']); - - // Mock console.error to prevent output during test - const originalError = console.error; - console.error = jest.fn(); - - expect(() => parseCLIOptions()).toThrow(); - - console.error = originalError; + expect(result.transport).toBe('http'); + expect(result.port).toBe('2222'); cleanup(); }); }); describe('parseQueryOptions', () => { - const defaultOptions = { - client: undefined, - includeDynamicTools: 
undefined, - includeCodeTools: undefined, - includeAllTools: undefined, - filters: [], - capabilities: { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - }; - - it('should parse basic filter options from query string', () => { - const query = 'tool=test-tool&resource=test-resource&operation=read&tag=test-tag'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'test-resource' }, - { type: 'operation', op: 'include', value: 'read' }, - { type: 'tag', op: 'include', value: 'test-tag' }, - { type: 'tool', op: 'include', value: 'test-tool' }, - ]); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }); - }); - - it('should parse exclusion filters from query string', () => { - const query = 'no_tool=exclude-tool&no_resource=exclude-resource&no_operation=write&no_tag=exclude-tag'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'exclude', value: 'exclude-resource' }, - { type: 'operation', op: 'exclude', value: 'write' }, - { type: 'tag', op: 'exclude', value: 'exclude-tag' }, - { type: 'tool', op: 'exclude', value: 'exclude-tool' }, - ]); - }); - - it('should parse client option from query string', () => { - const query = 'client=openai-agents'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.client).toBe('openai-agents'); - }); - - it('should parse client capabilities from query string', () => { - const query = 'capability=top-level-unions&capability=valid-json&capability=tool-name-length%3D40'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: 40, - }); - }); - - it('should parse no-capability options from query string', () => { - const query = 'no_capability=top-level-unions&no_capability=refs&no_capability=formats'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.capabilities).toEqual({ - topLevelUnions: false, - validJson: true, - refs: false, - unions: true, - formats: false, - toolNameLength: undefined, - }); - }); - - it('should parse tools options from query string', () => { - const query = 'tools=dynamic&tools=all'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.includeDynamicTools).toBe(true); - expect(result.includeAllTools).toBe(true); - }); - - it('should parse no-tools options from query string', () => { - const query = 'tools=dynamic&tools=all&no_tools=dynamic'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.includeDynamicTools).toBe(false); - expect(result.includeAllTools).toBe(true); - }); - - it('should handle array values in query string', () => { - const query = 'tool[]=tool1&tool[]=tool2&resource[]=res1&resource[]=res2'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'res1' }, - { type: 'resource', op: 'include', value: 'res2' }, - { type: 'tool', op: 'include', value: 'tool1' }, - { type: 'tool', op: 'include', value: 'tool2' }, - ]); - }); - - it('should merge with default options', () => { - const defaultWithFilters = { - ...defaultOptions, - filters: [{ type: 
'tag' as const, op: 'include' as const, value: 'existing-tag' }], - client: 'cursor' as const, - includeDynamicTools: true, - }; - - const query = 'tool=new-tool&resource=new-resource'; - const result = parseQueryOptions(defaultWithFilters, query); - - expect(result.filters).toEqual([ - { type: 'tag', op: 'include', value: 'existing-tag' }, - { type: 'resource', op: 'include', value: 'new-resource' }, - { type: 'tool', op: 'include', value: 'new-tool' }, - ]); - - expect(result.client).toBe('cursor'); - expect(result.includeDynamicTools).toBe(true); - }); - - it('should override client from default options', () => { - const defaultWithClient = { - ...defaultOptions, - client: 'cursor' as const, - }; + const defaultOptions = {}; - const query = 'client=openai-agents'; - const result = parseQueryOptions(defaultWithClient, query); - - expect(result.client).toBe('openai-agents'); - }); - - it('should merge capabilities with default options', () => { - const defaultWithCapabilities = { - ...defaultOptions, - capabilities: { - topLevelUnions: false, - validJson: false, - refs: true, - unions: true, - formats: true, - toolNameLength: 30, - }, - }; - - const query = 'capability=top-level-unions&no_capability=refs'; - const result = parseQueryOptions(defaultWithCapabilities, query); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: false, - refs: false, - unions: true, - formats: true, - toolNameLength: 30, - }); - }); - - it('should handle empty query string', () => { + it('default parsing should be empty', () => { const query = ''; const result = parseQueryOptions(defaultOptions, query); - expect(result).toEqual(defaultOptions); + expect(result).toEqual({}); }); it('should handle invalid query string gracefully', () => { @@ -352,189 +47,4 @@ describe('parseQueryOptions', () => { // Should throw due to Zod validation for invalid operation expect(() => parseQueryOptions(defaultOptions, query)).toThrow(); }); - - it('should preserve default undefined values when not specified', () => { - const defaultWithUndefined = { - ...defaultOptions, - client: undefined, - includeDynamicTools: undefined, - includeAllTools: undefined, - }; - - const query = 'tool=test-tool'; - const result = parseQueryOptions(defaultWithUndefined, query); - - expect(result.client).toBeUndefined(); - expect(result.includeDynamicTools).toBeFalsy(); - expect(result.includeAllTools).toBeFalsy(); - }); - - it('should handle complex query with mixed include and exclude filters', () => { - const query = - 'tool=include-tool&no_tool=exclude-tool&resource=include-res&no_resource=exclude-res&operation=read&tag=include-tag&no_tag=exclude-tag'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'include-res' }, - { type: 'operation', op: 'include', value: 'read' }, - { type: 'tag', op: 'include', value: 'include-tag' }, - { type: 'tool', op: 'include', value: 'include-tool' }, - { type: 'resource', op: 'exclude', value: 'exclude-res' }, - { type: 'tag', op: 'exclude', value: 'exclude-tag' }, - { type: 'tool', op: 'exclude', value: 'exclude-tool' }, - ]); - }); - - it('code tools are enabled on http servers with default option set', () => { - const query = 'tools=code'; - const result = parseQueryOptions({ ...defaultOptions, includeCodeTools: true }, query); - - expect(result.includeCodeTools).toBe(true); - }); - - it('code tools are prevented on http servers when no default option set', () => { - const query = 'tools=code'; -
const result = parseQueryOptions(defaultOptions, query); - - expect(result.includeCodeTools).toBe(undefined); - }); - - it('code tools are prevented on http servers when default option is explicitly false', () => { - const query = 'tools=code'; - const result = parseQueryOptions({ ...defaultOptions, includeCodeTools: false }, query); - - expect(result.includeCodeTools).toBe(false); - }); -}); - -describe('parseEmbeddedJSON', () => { - it('should not change non-string values', () => { - const args = { - numberProp: 42, - booleanProp: true, - objectProp: { nested: 'value' }, - arrayProp: [1, 2, 3], - nullProp: null, - undefinedProp: undefined, - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - expect(result['numberProp']).toBe(42); - expect(result['booleanProp']).toBe(true); - expect(result['objectProp']).toEqual({ nested: 'value' }); - expect(result['arrayProp']).toEqual([1, 2, 3]); - expect(result['nullProp']).toBe(null); - expect(result['undefinedProp']).toBe(undefined); - }); - - it('should parse valid JSON objects in string properties', () => { - const args = { - jsonObjectString: '{"key": "value", "number": 123}', - regularString: 'not json', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).not.toBe(args); // Should return new object since changes were made - expect(result['jsonObjectString']).toEqual({ key: 'value', number: 123 }); - expect(result['regularString']).toBe('not json'); - }); - - it('should leave invalid JSON in string properties unchanged', () => { - const args = { - invalidJson1: '{"key": value}', // Missing quotes around value - invalidJson2: '{key: "value"}', // Missing quotes around key - invalidJson3: '{"key": "value",}', // Trailing comma - invalidJson4: 'just a regular string', - emptyString: '', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - expect(result['invalidJson1']).toBe('{"key": value}'); - expect(result['invalidJson2']).toBe('{key: "value"}'); - expect(result['invalidJson3']).toBe('{"key": "value",}'); - expect(result['invalidJson4']).toBe('just a regular string'); - expect(result['emptyString']).toBe(''); - }); - - it('should not parse JSON primitives in string properties', () => { - const args = { - numberString: '123', - floatString: '45.67', - negativeNumberString: '-89', - booleanTrueString: 'true', - booleanFalseString: 'false', - nullString: 'null', - jsonArrayString: '[1, 2, 3, "test"]', - regularString: 'not json', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - expect(result['numberString']).toBe('123'); - expect(result['floatString']).toBe('45.67'); - expect(result['negativeNumberString']).toBe('-89'); - expect(result['booleanTrueString']).toBe('true'); - expect(result['booleanFalseString']).toBe('false'); - expect(result['nullString']).toBe('null'); - expect(result['jsonArrayString']).toBe('[1, 2, 3, "test"]'); - expect(result['regularString']).toBe('not json'); - }); - - it('should handle mixed valid objects and other JSON types', () => { - const args = { - validObject: '{"success": true}', - invalidObject: '{"missing": quote}', - validNumber: '42', - validArray: '[1, 2, 3]', - keepAsString: 'hello world', - nonString: 123, - }; - const schema = {}; 
- - const result = parseEmbeddedJSON(args, schema); - - expect(result).not.toBe(args); // Should return new object since some changes were made - expect(result['validObject']).toEqual({ success: true }); - expect(result['invalidObject']).toBe('{"missing": quote}'); - expect(result['validNumber']).toBe('42'); // Not parsed, remains string - expect(result['validArray']).toBe('[1, 2, 3]'); // Not parsed, remains string - expect(result['keepAsString']).toBe('hello world'); - expect(result['nonString']).toBe(123); - }); - - it('should return original object when no strings are present', () => { - const args = { - number: 42, - boolean: true, - object: { key: 'value' }, - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - }); - - it('should return original object when all strings are invalid JSON', () => { - const args = { - string1: 'hello', - string2: 'world', - string3: 'not json at all', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - }); }); diff --git a/packages/mcp-server/tests/tools.test.ts b/packages/mcp-server/tests/tools.test.ts deleted file mode 100644 index cfff24a..0000000 --- a/packages/mcp-server/tests/tools.test.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { Endpoint, Filter, Metadata, query } from '../src/tools'; - -describe('Endpoint filtering', () => { - const endpoints: Endpoint[] = [ - endpoint({ - resource: 'user', - operation: 'read', - tags: ['admin'], - toolName: 'retrieve_user', - }), - endpoint({ - resource: 'user.profile', - operation: 'write', - tags: [], - toolName: 'create_user_profile', - }), - endpoint({ - resource: 'user.profile', - operation: 'read', - tags: [], - toolName: 'get_user_profile', - }), - endpoint({ - resource: 'user.roles.permissions', - operation: 'write', - tags: ['admin', 'security'], - toolName: 'update_user_role_permissions', - }), - endpoint({ - resource: 'documents.metadata.tags', - operation: 'write', - tags: ['taxonomy', 'metadata'], - toolName: 'create_document_metadata_tags', - }), - endpoint({ - resource: 'organization.settings', - operation: 'read', - tags: ['admin', 'configuration'], - toolName: 'get_organization_settings', - }), - ]; - - const tests: { name: string; filters: Filter[]; expected: string[] }[] = [ - { - name: 'match none', - filters: [], - expected: [], - }, - - // Resource tests - { - name: 'simple resource', - filters: [{ type: 'resource', op: 'include', value: 'user' }], - expected: ['retrieve_user'], - }, - { - name: 'exclude resource', - filters: [{ type: 'resource', op: 'exclude', value: 'user' }], - expected: [ - 'create_user_profile', - 'get_user_profile', - 'update_user_role_permissions', - 'create_document_metadata_tags', - 'get_organization_settings', - ], - }, - { - name: 'resource and subresources', - filters: [{ type: 'resource', op: 'include', value: 'user*' }], - expected: ['retrieve_user', 'create_user_profile', 'get_user_profile', 'update_user_role_permissions'], - }, - { - name: 'just subresources', - filters: [{ type: 'resource', op: 'include', value: 'user.*' }], - expected: ['create_user_profile', 'get_user_profile', 'update_user_role_permissions'], - }, - { - name: 'specific subresource', - filters: [{ type: 'resource', op: 'include', value: 'user.roles.permissions' }], - expected: ['update_user_role_permissions'], - }, - { - name: 'deep wildcard match', - filters: [{ 
type: 'resource', op: 'include', value: '*.*.tags' }], - expected: ['create_document_metadata_tags'], - }, - - // Operation tests - { - name: 'read operation', - filters: [{ type: 'operation', op: 'include', value: 'read' }], - expected: ['retrieve_user', 'get_user_profile', 'get_organization_settings'], - }, - { - name: 'write operation', - filters: [{ type: 'operation', op: 'include', value: 'write' }], - expected: ['create_user_profile', 'update_user_role_permissions', 'create_document_metadata_tags'], - }, - { - name: 'resource and operation combined', - filters: [ - { type: 'resource', op: 'include', value: 'user.profile' }, - { type: 'operation', op: 'exclude', value: 'write' }, - ], - expected: ['get_user_profile'], - }, - - // Tag tests - { - name: 'admin tag', - filters: [{ type: 'tag', op: 'include', value: 'admin' }], - expected: ['retrieve_user', 'update_user_role_permissions', 'get_organization_settings'], - }, - { - name: 'taxonomy tag', - filters: [{ type: 'tag', op: 'include', value: 'taxonomy' }], - expected: ['create_document_metadata_tags'], - }, - { - name: 'multiple tags (OR logic)', - filters: [ - { type: 'tag', op: 'include', value: 'admin' }, - { type: 'tag', op: 'include', value: 'security' }, - ], - expected: ['retrieve_user', 'update_user_role_permissions', 'get_organization_settings'], - }, - { - name: 'excluding a tag', - filters: [ - { type: 'tag', op: 'include', value: 'admin' }, - { type: 'tag', op: 'exclude', value: 'security' }, - ], - expected: ['retrieve_user', 'get_organization_settings'], - }, - - // Tool name tests - { - name: 'tool name match', - filters: [{ type: 'tool', op: 'include', value: 'get_organization_settings' }], - expected: ['get_organization_settings'], - }, - { - name: 'two tools match', - filters: [ - { type: 'tool', op: 'include', value: 'get_organization_settings' }, - { type: 'tool', op: 'include', value: 'create_user_profile' }, - ], - expected: ['create_user_profile', 'get_organization_settings'], - }, - { - name: 'excluding tool by name', - filters: [ - { type: 'resource', op: 'include', value: 'user*' }, - { type: 'tool', op: 'exclude', value: 'retrieve_user' }, - ], - expected: ['create_user_profile', 'get_user_profile', 'update_user_role_permissions'], - }, - - // Complex combinations - { - name: 'complex filter: read operations with admin tag', - filters: [ - { type: 'operation', op: 'include', value: 'read' }, - { type: 'tag', op: 'include', value: 'admin' }, - ], - expected: [ - 'retrieve_user', - 'get_user_profile', - 'update_user_role_permissions', - 'get_organization_settings', - ], - }, - { - name: 'complex filter: user resources with no tags', - filters: [ - { type: 'resource', op: 'include', value: 'user.profile' }, - { type: 'tag', op: 'exclude', value: 'admin' }, - ], - expected: ['create_user_profile', 'get_user_profile'], - }, - { - name: 'complex filter: user resources and tags', - filters: [ - { type: 'resource', op: 'include', value: 'user.profile' }, - { type: 'tag', op: 'include', value: 'admin' }, - ], - expected: [ - 'retrieve_user', - 'create_user_profile', - 'get_user_profile', - 'update_user_role_permissions', - 'get_organization_settings', - ], - }, - ]; - - tests.forEach((test) => { - it(`filters by ${test.name}`, () => { - const filtered = query(test.filters, endpoints); - expect(filtered.map((e) => e.tool.name)).toEqual(test.expected); - }); - }); -}); - -function endpoint({ - resource, - operation, - tags, - toolName, -}: { - resource: string; - operation: Metadata['operation']; - tags: string[]; - 
toolName: string; -}): Endpoint { - return { - metadata: { - resource, - operation, - tags, - }, - tool: { name: toolName, inputSchema: { type: 'object', properties: {} } }, - handler: jest.fn(), - }; -} diff --git a/src/version.ts b/src/version.ts index baebd9d..35b0d1b 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const VERSION = '0.1.0-alpha.16'; // x-release-please-version +export const VERSION = '0.1.0-alpha.17'; // x-release-please-version