From 8366078a62fddab898e1f340a67d606ce788b118 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 16:38:30 +0000 Subject: [PATCH] chore: configure new SDK language --- .github/workflows/publish-npm.yml | 11 +- .stats.yml | 2 +- eslint.config.mjs | 2 +- packages/mcp-server/README.md | 32 ++ packages/mcp-server/build | 45 ++ packages/mcp-server/package.json | 74 +++ .../scripts/postprocess-dist-package-json.cjs | 12 + packages/mcp-server/src/index.ts | 18 + packages/mcp-server/src/server.ts | 86 +++ .../generate-content-applications.ts | 95 ++++ .../graphs/list-applications-graphs.ts | 24 + .../graphs/update-applications-graphs.ts | 34 ++ .../jobs/create-applications-jobs.ts | 48 ++ .../jobs/list-applications-jobs.ts | 39 ++ .../jobs/retrieve-applications-jobs.ts | 24 + .../jobs/retry-applications-jobs.ts | 25 + .../tools/applications/list-applications.ts | 45 ++ .../applications/retrieve-applications.ts | 25 + .../mcp-server/src/tools/chat/chat-chat.ts | 506 ++++++++++++++++++ .../tools/completions/create-completions.ts | 137 +++++ .../src/tools/files/delete-files.ts | 24 + .../src/tools/files/download-files.ts | 24 + .../mcp-server/src/tools/files/list-files.ts | 52 ++ .../src/tools/files/retrieve-files.ts | 24 + .../mcp-server/src/tools/files/retry-files.ts | 28 + .../src/tools/files/upload-files.ts | 27 + .../tools/graphs/add-file-to-graph-graphs.ts | 28 + .../src/tools/graphs/create-graphs.ts | 31 ++ .../src/tools/graphs/delete-graphs.ts | 24 + .../src/tools/graphs/list-graphs.ts | 42 ++ .../src/tools/graphs/question-graphs.ts | 73 +++ .../graphs/remove-file-from-graph-graphs.ts | 27 + .../src/tools/graphs/retrieve-graphs.ts | 24 + .../src/tools/graphs/update-graphs.ts | 34 ++ packages/mcp-server/src/tools/index.ts | 76 +++ .../src/tools/models/list-models.ts | 20 + .../comprehend/medical-tools-comprehend.ts | 33 ++ .../tools/context-aware-splitting-tools.ts | 33 ++ 
.../src/tools/tools/parse-pdf-tools.ts | 30 ++ .../src/tools/vision/analyze-vision.ts | 52 ++ packages/mcp-server/tsc-multi.json | 7 + packages/mcp-server/tsconfig.build.json | 18 + packages/mcp-server/tsconfig.dist-src.json | 11 + packages/mcp-server/tsconfig.json | 37 ++ release-please-config.json | 8 +- scripts/build-all | 12 + scripts/publish-packages.ts | 102 ++++ scripts/utils/make-dist-package-json.cjs | 8 + 48 files changed, 2189 insertions(+), 4 deletions(-) create mode 100644 packages/mcp-server/README.md create mode 100644 packages/mcp-server/build create mode 100644 packages/mcp-server/package.json create mode 100644 packages/mcp-server/scripts/postprocess-dist-package-json.cjs create mode 100644 packages/mcp-server/src/index.ts create mode 100644 packages/mcp-server/src/server.ts create mode 100644 packages/mcp-server/src/tools/applications/generate-content-applications.ts create mode 100644 packages/mcp-server/src/tools/applications/graphs/list-applications-graphs.ts create mode 100644 packages/mcp-server/src/tools/applications/graphs/update-applications-graphs.ts create mode 100644 packages/mcp-server/src/tools/applications/jobs/create-applications-jobs.ts create mode 100644 packages/mcp-server/src/tools/applications/jobs/list-applications-jobs.ts create mode 100644 packages/mcp-server/src/tools/applications/jobs/retrieve-applications-jobs.ts create mode 100644 packages/mcp-server/src/tools/applications/jobs/retry-applications-jobs.ts create mode 100644 packages/mcp-server/src/tools/applications/list-applications.ts create mode 100644 packages/mcp-server/src/tools/applications/retrieve-applications.ts create mode 100644 packages/mcp-server/src/tools/chat/chat-chat.ts create mode 100644 packages/mcp-server/src/tools/completions/create-completions.ts create mode 100644 packages/mcp-server/src/tools/files/delete-files.ts create mode 100644 packages/mcp-server/src/tools/files/download-files.ts create mode 100644 
packages/mcp-server/src/tools/files/list-files.ts create mode 100644 packages/mcp-server/src/tools/files/retrieve-files.ts create mode 100644 packages/mcp-server/src/tools/files/retry-files.ts create mode 100644 packages/mcp-server/src/tools/files/upload-files.ts create mode 100644 packages/mcp-server/src/tools/graphs/add-file-to-graph-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/create-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/delete-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/list-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/question-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/remove-file-from-graph-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/retrieve-graphs.ts create mode 100644 packages/mcp-server/src/tools/graphs/update-graphs.ts create mode 100644 packages/mcp-server/src/tools/index.ts create mode 100644 packages/mcp-server/src/tools/models/list-models.ts create mode 100644 packages/mcp-server/src/tools/tools/comprehend/medical-tools-comprehend.ts create mode 100644 packages/mcp-server/src/tools/tools/context-aware-splitting-tools.ts create mode 100644 packages/mcp-server/src/tools/tools/parse-pdf-tools.ts create mode 100644 packages/mcp-server/src/tools/vision/analyze-vision.ts create mode 100644 packages/mcp-server/tsc-multi.json create mode 100644 packages/mcp-server/tsconfig.build.json create mode 100644 packages/mcp-server/tsconfig.dist-src.json create mode 100644 packages/mcp-server/tsconfig.json create mode 100755 scripts/build-all create mode 100644 scripts/publish-packages.ts diff --git a/.github/workflows/publish-npm.yml b/.github/workflows/publish-npm.yml index e9b6ebe8..a6ba2a3d 100644 --- a/.github/workflows/publish-npm.yml +++ b/.github/workflows/publish-npm.yml @@ -4,6 +4,10 @@ name: Publish NPM on: workflow_dispatch: + inputs: + path: + description: The path to run the release in, e.g. '.' 
or 'packages/mcp-server' + required: true release: types: [published] @@ -27,6 +31,11 @@ jobs: - name: Publish to NPM run: | - bash ./bin/publish-npm + if [ -n "${{ github.event.inputs.path }}" ]; then + PATHS_RELEASED='[\"${{ github.event.inputs.path }}\"]' + else + PATHS_RELEASED='[\".\", \"packages/mcp-server\"]' + fi + yarn tsn scripts/publish-packages.ts "{ \"paths_released\": \"$PATHS_RELEASED\" }" env: NPM_TOKEN: ${{ secrets.WRITER_NPM_TOKEN || secrets.NPM_TOKEN }} diff --git a/.stats.yml b/.stats.yml index 79dab344..cda70cf6 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 30 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/writerai%2Fwriter-d15316b8a3a086ae9ec8eea0d436b0885262df9bcf23b9587ad059e50357c220.yml openapi_spec_hash: 4f81a4f4840438f80eff345e76ead962 -config_hash: b3310cd2944d74a3599e847847226a42 +config_hash: 8b4e4a902369723a665b1d265169a3f1 diff --git a/eslint.config.mjs b/eslint.config.mjs index 7f7c7265..95b4ce40 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -34,7 +34,7 @@ export default tseslint.config( }, }, { - files: ['tests/**', 'examples/**'], + files: ['tests/**', 'examples/**', 'packages/**'], rules: { 'no-restricted-imports': 'off', }, diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md new file mode 100644 index 00000000..d3f1a0e9 --- /dev/null +++ b/packages/mcp-server/README.md @@ -0,0 +1,32 @@ +# Writer TypeScript MCP Server + +It is generated with [Stainless](https://www.stainless.com/). + +## Installation + +### Via Claude Desktop + +See [the user guide](https://modelcontextprotocol.io/quickstart/user) for setup. + +Once it's set up, add your MCP server to your `claude_desktop_config.json` file to enable it. + +The configuration file should be at: + +- macOS: `~/Library/Application Support/Claude/claude_desktop_config.json` +- Windows: `%APPDATA%\Claude\claude_desktop_config.json` + +Add the following value to your `mcpServers` section. 
Make sure to provide any necessary environment variables (like API keys) as well. + +```json +{ + "mcpServers": { + "writer_sdk_api": { + "command": "npx", + "args": ["-y", "writer-sdk-mcp"], + "env": { + "WRITER_API_KEY": "My API Key" + } + } + } +} +``` diff --git a/packages/mcp-server/build b/packages/mcp-server/build new file mode 100644 index 00000000..4667b22c --- /dev/null +++ b/packages/mcp-server/build @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +set -exuo pipefail + +rm -rf dist; mkdir dist + +# Copy src to dist/src and build from dist/src into dist, so that +# the source map for index.js.map will refer to ./src/index.ts etc +cp -rp src README.md dist + +for file in LICENSE; do + if [ -e "../../${file}" ]; then cp "../../${file}" dist; fi +done + +for file in CHANGELOG.md; do + if [ -e "${file}" ]; then cp "${file}" dist; fi +done + +# this converts the export map paths for the dist directory +# and does a few other minor things +PKG_JSON_PATH=../../packages/mcp-server/package.json node ../../scripts/utils/make-dist-package-json.cjs > dist/package.json + +# updates the `writer-sdk` dependency to point to NPM +node scripts/postprocess-dist-package-json.cjs + +# build to .js/.mjs/.d.ts files +npm exec tsc-multi +# we need to add exports = module.exports = Anthropic TypeScript to index.js; +# No way to get that from index.ts because it would cause compile errors +# when building .mjs +DIST_PATH=./dist node ../../scripts/utils/fix-index-exports.cjs + +# with "moduleResolution": "nodenext", if ESM resolves to index.d.ts, +# it'll have TS errors on the default import. 
But if it resolves to +# index.d.mts the default import will work (even though both files have +# the same export default statement) +cp dist/index.d.ts dist/index.d.mts +cp tsconfig.dist-src.json dist/src/tsconfig.json + +# Add proper Node.js shebang to the top of the file +sed -i.bak '1s;^;#!/usr/bin/env node\n;' dist/index.js +rm dist/index.js.bak + +chmod +x dist/index.js + +DIST_PATH=./dist PKG_IMPORT_PATH=writer-sdk-mcp/ node ../../scripts/utils/postprocess-files.cjs diff --git a/packages/mcp-server/package.json b/packages/mcp-server/package.json new file mode 100644 index 00000000..3f805df4 --- /dev/null +++ b/packages/mcp-server/package.json @@ -0,0 +1,74 @@ +{ + "name": "writer-sdk-mcp", + "version": "2.1.0-rc.1", + "description": "The official MCP Server for the Writer API", + "author": "Writer ", + "types": "dist/index.d.ts", + "main": "dist/index.js", + "type": "commonjs", + "repository": "github:writer/writer-node", + "license": "Apache-2.0", + "packageManager": "yarn@1.22.22", + "private": false, + "scripts": { + "test": "echo 'no tests defined yet' && exit 1", + "build": "bash ./build", + "prepack": "echo 'to pack, run yarn build && (cd dist; yarn pack)' && exit 1", + "prepublishOnly": "echo 'to publish, run yarn build && (cd dist; yarn publish)' && exit 1", + "format": "prettier --write --cache --cache-strategy metadata . !dist", + "prepare": "npm run build", + "tsn": "ts-node -r tsconfig-paths/register", + "lint": "eslint --ext ts,js .", + "fix": "eslint --fix --ext ts,js ." 
+ }, + "dependencies": { + "writer-sdk": "file:../../dist/", + "@modelcontextprotocol/sdk": "^1.6.1" + }, + "bin": { + "mcp-server": "dist/index.js" + }, + "devDependencies": { + "@types/jest": "^29.4.0", + "@typescript-eslint/eslint-plugin": "^6.7.0", + "@typescript-eslint/parser": "^6.7.0", + "eslint": "^8.49.0", + "eslint-plugin-prettier": "^5.0.1", + "eslint-plugin-unused-imports": "^3.0.0", + "jest": "^29.4.0", + "prettier": "^3.0.0", + "ts-jest": "^29.1.0", + "ts-morph": "^19.0.0", + "ts-node": "^10.5.0", + "tsc-multi": "^1.1.0", + "tsconfig-paths": "^4.0.0", + "typescript": "^4.8.2" + }, + "imports": { + "writer-sdk-mcp": ".", + "writer-sdk-mcp/*": "./src/*" + }, + "exports": { + ".": { + "require": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "types": "./dist/index.d.mts", + "default": "./dist/index.mjs" + }, + "./*.mjs": { + "types": "./dist/*.d.ts", + "default": "./dist/*.mjs" + }, + "./*.js": { + "types": "./dist/*.d.ts", + "default": "./dist/*.js" + }, + "./*": { + "types": "./dist/*.d.ts", + "require": "./dist/*.js", + "default": "./dist/*.mjs" + } + } +} diff --git a/packages/mcp-server/scripts/postprocess-dist-package-json.cjs b/packages/mcp-server/scripts/postprocess-dist-package-json.cjs new file mode 100644 index 00000000..298986f8 --- /dev/null +++ b/packages/mcp-server/scripts/postprocess-dist-package-json.cjs @@ -0,0 +1,12 @@ +const fs = require('fs'); +const pkgJson = require('../dist/package.json'); +const parentPkgJson = require('../../../package.json'); + +for (const dep in pkgJson.dependencies) { + // ensure we point to NPM instead of a local directory + if (dep === 'writer-sdk') { + pkgJson.dependencies[dep] = '^' + parentPkgJson.version; + } +} + +fs.writeFileSync('dist/package.json', JSON.stringify(pkgJson, null, 2)); diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts new file mode 100644 index 00000000..038f6a81 --- /dev/null +++ b/packages/mcp-server/src/index.ts @@ -0,0 +1,18 
@@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; +import { server, init } from './server'; + +async function main() { + init({ server }); + const transport = new StdioServerTransport(); + await server.connect(transport); + console.error('MCP Server running on stdio'); +} + +if (require.main === module) { + main().catch((error) => { + console.error('Fatal error in main():', error); + process.exit(1); + }); +} diff --git a/packages/mcp-server/src/server.ts b/packages/mcp-server/src/server.ts new file mode 100644 index 00000000..06572eb8 --- /dev/null +++ b/packages/mcp-server/src/server.ts @@ -0,0 +1,86 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; +import { tools, handlers, HandlerFunction } from './tools'; +import { CallToolRequestSchema, ListToolsRequestSchema, Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; +export { tools, handlers } from './tools'; + +// Create server instance +export const server = new McpServer( + { + name: 'writer_sdk_api', + version: '2.1.0-rc.1', + }, + { + capabilities: { + tools: {}, + }, + }, +); + +/** + * Initializes the provided MCP Server with the given tools and handlers. + * If not provided, the default client, tools and handlers will be used. + */ +export function init(params: { + server: Server | McpServer; + client?: Writer; + tools?: Tool[]; + handlers?: Record; +}) { + const server = params.server instanceof McpServer ? 
params.server.server : params.server; + const providedTools = params.tools || tools; + const providedHandlers = params.handlers || handlers; + const client = params.client || new Writer({}); + + server.setRequestHandler(ListToolsRequestSchema, async () => { + return { + tools: providedTools, + }; + }); + + server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + const handler = providedHandlers[name]; + if (!handler) { + throw new Error(`Unknown tool: ${name}`); + } + + return executeHandler(handler, client, args); + }); +} + +/** + * Runs the provided handler with the given client and arguments. + */ +export async function executeHandler( + handler: HandlerFunction, + client: Writer, + args: Record | undefined, +) { + const result = await handler(client, args || {}); + return { + content: [ + { + type: 'text', + text: JSON.stringify(result, null, 2), + }, + ], + }; +} + +export const readEnv = (env: string): string => { + let envValue = undefined; + if (typeof (globalThis as any).process !== 'undefined') { + envValue = (globalThis as any).process.env?.[env]?.trim(); + } else if (typeof (globalThis as any).Deno !== 'undefined') { + envValue = (globalThis as any).Deno.env?.get?.(env)?.trim(); + } + if (envValue === undefined) { + throw new Error(`Environment variable ${env} is not set`); + } + return envValue; +}; diff --git a/packages/mcp-server/src/tools/applications/generate-content-applications.ts b/packages/mcp-server/src/tools/applications/generate-content-applications.ts new file mode 100644 index 00000000..a42d8851 --- /dev/null +++ b/packages/mcp-server/src/tools/applications/generate-content-applications.ts @@ -0,0 +1,95 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'generate_content_applications', + description: 'Generate content from an existing no-code application with inputs.', + inputSchema: { + type: 'object', + anyOf: [ + { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + inputs: { + type: 'array', + items: { + type: 'object', + title: 'generate_application_input', + properties: { + id: { + type: 'string', + description: + 'The unique identifier for the input field from the application. All input types from the No-code application are supported (i.e. Text input, Dropdown, File upload, Image input). The identifier should be the name of the input type.', + }, + value: { + type: 'array', + description: + 'The value for the input field. \n\nIf the input type is "File upload", you must pass the `file_id` of an uploaded file. You cannot pass a file object directly. See the [file upload endpoint](/api-guides/api-reference/file-api/upload-files) for instructions on uploading files or the [list files endpoint](/api-guides/api-reference/file-api/get-all-files) for how to see a list of uploaded files and their IDs.', + items: { + type: 'string', + }, + }, + }, + required: ['id', 'value'], + }, + }, + stream: { + type: 'string', + description: + 'Indicates whether the response should be streamed. Currently only supported for research assistant applications.', + enum: [false], + }, + }, + }, + { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + inputs: { + type: 'array', + items: { + type: 'object', + title: 'generate_application_input', + properties: { + id: { + type: 'string', + description: + 'The unique identifier for the input field from the application. All input types from the No-code application are supported (i.e. Text input, Dropdown, File upload, Image input). 
The identifier should be the name of the input type.', + }, + value: { + type: 'array', + description: + 'The value for the input field. \n\nIf the input type is "File upload", you must pass the `file_id` of an uploaded file. You cannot pass a file object directly. See the [file upload endpoint](/api-guides/api-reference/file-api/upload-files) for instructions on uploading files or the [list files endpoint](/api-guides/api-reference/file-api/get-all-files) for how to see a list of uploaded files and their IDs.', + items: { + type: 'string', + }, + }, + }, + required: ['id', 'value'], + }, + }, + stream: { + type: 'string', + description: + 'Indicates whether the response should be streamed. Currently only supported for research assistant applications.', + enum: [true], + }, + }, + }, + ], + }, +}; + +export const handler = (client: Writer, args: any) => { + const { application_id, ...body } = args; + return client.applications.generateContent(application_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/graphs/list-applications-graphs.ts b/packages/mcp-server/src/tools/applications/graphs/list-applications-graphs.ts new file mode 100644 index 00000000..b5544917 --- /dev/null +++ b/packages/mcp-server/src/tools/applications/graphs/list-applications-graphs.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'list_applications_graphs', + description: 'Retrieve Knowledge Graphs associated with a no-code chat application.', + inputSchema: { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { application_id } = args; + return client.applications.graphs.list(application_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/graphs/update-applications-graphs.ts b/packages/mcp-server/src/tools/applications/graphs/update-applications-graphs.ts new file mode 100644 index 00000000..5cb49f2b --- /dev/null +++ b/packages/mcp-server/src/tools/applications/graphs/update-applications-graphs.ts @@ -0,0 +1,34 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'update_applications_graphs', + description: + 'Updates the Knowledge Graphs listed and associates them with the no-code chat app to be used.', + inputSchema: { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + graph_ids: { + type: 'array', + description: + 'A list of Knowledge Graph IDs to associate with the application. 
Note that this will replace the existing list of Knowledge Graphs associated with the application, not add to it.', + items: { + type: 'string', + description: 'The unique identifier for the Knowledge Graph.', + }, + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { application_id, ...body } = args; + return client.applications.graphs.update(application_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/jobs/create-applications-jobs.ts b/packages/mcp-server/src/tools/applications/jobs/create-applications-jobs.ts new file mode 100644 index 00000000..d7e8414e --- /dev/null +++ b/packages/mcp-server/src/tools/applications/jobs/create-applications-jobs.ts @@ -0,0 +1,48 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'create_applications_jobs', + description: 'Generate content asynchronously from an existing application with inputs.', + inputSchema: { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + inputs: { + type: 'array', + description: 'A list of input objects to generate content for.', + items: { + type: 'object', + title: 'generate_application_input', + properties: { + id: { + type: 'string', + description: + 'The unique identifier for the input field from the application. All input types from the No-code application are supported (i.e. Text input, Dropdown, File upload, Image input). The identifier should be the name of the input type.', + }, + value: { + type: 'array', + description: + 'The value for the input field. \n\nIf the input type is "File upload", you must pass the `file_id` of an uploaded file. You cannot pass a file object directly. 
See the [file upload endpoint](/api-guides/api-reference/file-api/upload-files) for instructions on uploading files or the [list files endpoint](/api-guides/api-reference/file-api/get-all-files) for how to see a list of uploaded files and their IDs.', + items: { + type: 'string', + }, + }, + }, + required: ['id', 'value'], + }, + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { application_id, ...body } = args; + return client.applications.jobs.create(application_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/jobs/list-applications-jobs.ts b/packages/mcp-server/src/tools/applications/jobs/list-applications-jobs.ts new file mode 100644 index 00000000..fc46c36f --- /dev/null +++ b/packages/mcp-server/src/tools/applications/jobs/list-applications-jobs.ts @@ -0,0 +1,39 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'list_applications_jobs', + description: + 'Retrieve all jobs created via the async API, linked to the provided application ID (or alias).', + inputSchema: { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + limit: { + type: 'integer', + description: 'The pagination limit for retrieving the jobs.', + }, + offset: { + type: 'integer', + description: 'The pagination offset for retrieving the jobs.', + }, + status: { + type: 'string', + title: 'api_job_status', + description: 'The status of the job.', + enum: ['in_progress', 'failed', 'completed'], + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { application_id, ...body } = args; + return client.applications.jobs.list(application_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/jobs/retrieve-applications-jobs.ts 
b/packages/mcp-server/src/tools/applications/jobs/retrieve-applications-jobs.ts new file mode 100644 index 00000000..fb642a61 --- /dev/null +++ b/packages/mcp-server/src/tools/applications/jobs/retrieve-applications-jobs.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'retrieve_applications_jobs', + description: 'Retrieves a single job created via the Async API.', + inputSchema: { + type: 'object', + properties: { + job_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { job_id } = args; + return client.applications.jobs.retrieve(job_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/jobs/retry-applications-jobs.ts b/packages/mcp-server/src/tools/applications/jobs/retry-applications-jobs.ts new file mode 100644 index 00000000..b99fb9c3 --- /dev/null +++ b/packages/mcp-server/src/tools/applications/jobs/retry-applications-jobs.ts @@ -0,0 +1,25 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'retry_applications_jobs', + description: + 'Re-triggers the async execution of a single job previously created via the Async api and terminated in error.', + inputSchema: { + type: 'object', + properties: { + job_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { job_id } = args; + return client.applications.jobs.retry(job_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/list-applications.ts b/packages/mcp-server/src/tools/applications/list-applications.ts new file mode 100644 index 00000000..bd63b81e --- /dev/null +++ b/packages/mcp-server/src/tools/applications/list-applications.ts @@ -0,0 +1,45 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'list_applications', + description: + 'Retrieves a paginated list of no-code applications with optional filtering and sorting capabilities.', + inputSchema: { + type: 'object', + properties: { + after: { + type: 'string', + description: 'Return results after this application ID for pagination.', + }, + before: { + type: 'string', + description: 'Return results before this application ID for pagination.', + }, + limit: { + type: 'integer', + description: 'Maximum number of applications to return in the response.', + }, + order: { + type: 'string', + description: 'Sort order for the results based on creation time.', + enum: ['asc', 'desc'], + }, + type: { + type: 'string', + title: 'application_type', + description: 'Filter applications by their type.', + enum: ['generation'], + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return 
client.applications.list(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/applications/retrieve-applications.ts b/packages/mcp-server/src/tools/applications/retrieve-applications.ts new file mode 100644 index 00000000..288cfbba --- /dev/null +++ b/packages/mcp-server/src/tools/applications/retrieve-applications.ts @@ -0,0 +1,25 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'retrieve_applications', + description: + 'Retrieves detailed information for a specific no-code application, including its configuration and current status.', + inputSchema: { + type: 'object', + properties: { + application_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { application_id } = args; + return client.applications.retrieve(application_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/chat/chat-chat.ts b/packages/mcp-server/src/tools/chat/chat-chat.ts new file mode 100644 index 00000000..df15f763 --- /dev/null +++ b/packages/mcp-server/src/tools/chat/chat-chat.ts @@ -0,0 +1,506 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'chat_chat', + description: + 'Generate a chat completion based on the provided messages. The response shown below is for non-streaming. To learn about streaming responses, see the [chat completion guide](/api-guides/chat-completion).', + inputSchema: { + type: 'object', + anyOf: [ + { + type: 'object', + properties: { + messages: { + type: 'array', + description: + 'An array of message objects that form the conversation history or context for the model to respond to. 
The array must contain at least one message.', + items: { + type: 'object', + properties: { + role: { + type: 'string', + description: + 'The role of the chat message. You can provide a system prompt by setting the role to `system`, or specify that a message is the result of a [tool call](/api-guides/tool-calling) by setting the role to `tool`.', + enum: ['user', 'assistant', 'system', 'tool'], + }, + content: { + type: 'string', + }, + graph_data: { + type: 'object', + title: 'graph_data', + properties: { + sources: { + type: 'array', + items: { + type: 'object', + title: 'source', + properties: { + file_id: { + type: 'string', + description: 'The unique identifier of the file.', + }, + snippet: { + type: 'string', + description: 'A snippet of text from the source file.', + }, + }, + required: ['file_id', 'snippet'], + }, + }, + status: { + type: 'string', + title: 'graph_stage_status', + enum: ['processing', 'finished'], + }, + subqueries: { + type: 'array', + items: { + type: 'object', + title: 'sub_query', + properties: { + answer: { + type: 'string', + description: 'The answer to the subquery.', + }, + query: { + type: 'string', + description: 'The subquery that was asked.', + }, + sources: { + type: 'array', + items: { + $ref: '#/anyOf/0/properties/messages/items/graph_data/sources/items', + }, + }, + }, + required: ['answer', 'query', 'sources'], + }, + }, + }, + required: [], + }, + name: { + type: 'string', + }, + refusal: { + type: 'string', + }, + tool_call_id: { + type: 'string', + }, + tool_calls: { + type: 'array', + items: { + type: 'object', + title: 'tool_call', + properties: { + id: { + type: 'string', + }, + function: { + type: 'object', + title: 'function', + properties: { + arguments: { + type: 'string', + }, + name: { + type: 'string', + }, + }, + required: ['arguments'], + }, + type: { + type: 'string', + enum: ['function'], + }, + index: { + type: 'integer', + }, + }, + required: ['id', 'function', 'type'], + }, + }, + }, + required: 
['role'], + }, + }, + model: { + type: 'string', + description: + 'Specifies the model to be used for generating responses. The chat model is always `palmyra-x-004` for conversational use.', + }, + logprobs: { + type: 'boolean', + description: 'Specifies whether to return log probabilities of the output tokens.', + }, + max_tokens: { + type: 'integer', + description: + 'Defines the maximum number of tokens (words and characters) that the model can generate in the response. The default value is set to 16, but it can be adjusted to allow for longer or shorter responses as needed.', + }, + n: { + type: 'integer', + description: + 'Specifies the number of completions (responses) to generate from the model in a single request. This parameter allows multiple responses to be generated, offering a variety of potential replies from which to choose.', + }, + stop: { + anyOf: [ + { + type: 'array', + items: { + type: 'string', + }, + }, + { + type: 'string', + }, + ], + description: + 'A token or sequence of tokens that, when generated, will cause the model to stop producing further content. This can be a single token or an array of tokens, acting as a signal to end the output.', + }, + stream: { + type: 'string', + description: + 'Indicates whether the response should be streamed incrementally as it is generated or only returned once fully complete. Streaming can be useful for providing real-time feedback in interactive applications.', + enum: [false], + }, + stream_options: { + type: 'object', + title: 'stream_options', + description: 'Additional options for streaming.', + properties: { + include_usage: { + type: 'boolean', + description: 'Indicate whether to include usage information.', + }, + }, + required: ['include_usage'], + }, + temperature: { + type: 'number', + description: + "Controls the randomness or creativity of the model's responses. 
A higher temperature results in more varied and less predictable text, while a lower temperature produces more deterministic and conservative outputs.", + }, + tool_choice: { + anyOf: [ + { + type: 'object', + title: 'String', + properties: { + value: { + type: 'string', + title: 'string_tool_choice_options', + enum: ['none', 'auto', 'required'], + }, + }, + required: ['value'], + }, + { + type: 'object', + title: 'JSON object', + properties: { + value: { + type: 'object', + }, + }, + required: ['value'], + }, + ], + description: + 'Configure how the model will call functions: `auto` will allow the model to automatically choose the best tool, `none` disables tool calling. You can also pass a specific previously defined function.', + }, + tools: { + type: 'array', + description: + 'An array containing tool definitions for tools that the model can use to generate responses. The tool definitions use JSON schema. You can define your own functions or use one of the built-in `graph`, `llm`, or `vision` tools. 
Note that you can only use one built-in tool type in the array (only one of `graph`, `llm`, or `vision`).', + items: { + anyOf: [ + { + type: 'object', + title: 'Function tool', + properties: { + function: { + type: 'object', + title: 'tool_function', + description: 'A tool that uses a custom function.', + properties: { + name: { + type: 'string', + description: 'Name of the function.', + }, + description: { + type: 'string', + description: 'Description of the function.', + }, + parameters: { + type: 'object', + description: 'The parameters of the function.', + }, + }, + required: ['name'], + }, + type: { + type: 'string', + description: 'The type of tool.', + enum: ['function'], + }, + }, + required: ['function', 'type'], + }, + { + type: 'object', + title: 'Graph tool', + properties: { + function: { + type: 'object', + title: 'graph_function', + description: 'A tool that uses Knowledge Graphs as context for responses.', + properties: { + graph_ids: { + type: 'array', + description: 'An array of graph IDs to be used in the tool.', + items: { + type: 'string', + }, + }, + subqueries: { + type: 'boolean', + description: 'Boolean to indicate whether to include subqueries in the response.', + }, + description: { + type: 'string', + description: 'A description of the graph content.', + }, + }, + required: ['graph_ids', 'subqueries'], + }, + type: { + type: 'string', + description: 'The type of tool.', + enum: ['graph'], + }, + }, + required: ['function', 'type'], + }, + { + type: 'object', + title: 'LLM tool', + properties: { + function: { + type: 'object', + title: 'LLM function', + description: 'A tool that uses another Writer model to generate a response.', + properties: { + description: { + type: 'string', + description: 'A description of the model to be used.', + }, + model: { + type: 'string', + description: 'The model to be used.', + }, + }, + required: ['description', 'model'], + }, + type: { + type: 'string', + description: 'The type of tool.', + enum: 
['llm'], + }, + }, + required: ['function', 'type'], + }, + { + type: 'object', + title: 'Vision tool', + properties: { + function: { + type: 'object', + title: 'Vision function', + description: 'A tool that uses Palmyra Vision to analyze images.', + properties: { + model: { + type: 'string', + description: 'The model to be used for image analysis. Must be `palmyra-vision`.', + }, + variables: { + type: 'array', + items: { + type: 'object', + title: 'Vision Tool Request File Variable', + properties: { + file_id: { + type: 'string', + description: + 'The File ID of the image to be analyzed. The file must be uploaded to the Writer platform before you use it with the Vision tool.', + }, + name: { + type: 'string', + description: + 'The name of the file variable. You must reference this name in the `message.content` field of the request to the chat completions endpoint. Use double curly braces (`{{}}`) to reference the file. For example, `Describe the difference between the image {{image_1}} and the image {{image_2}}`.', + }, + }, + required: ['file_id', 'name'], + }, + }, + }, + required: ['model', 'variables'], + }, + type: { + type: 'string', + description: 'The type of tool.', + enum: ['vision'], + }, + }, + required: ['function', 'type'], + }, + ], + }, + }, + top_p: { + type: 'number', + description: + 'Sets the threshold for "nucleus sampling," a technique to focus the model\'s token generation on the most likely subset of tokens. Only tokens with cumulative probability above this threshold are considered, controlling the trade-off between creativity and coherence.', + }, + }, + }, + { + type: 'object', + properties: { + messages: { + type: 'array', + description: + 'An array of message objects that form the conversation history or context for the model to respond to. The array must contain at least one message.', + items: { + type: 'object', + properties: { + role: { + type: 'string', + description: + 'The role of the chat message. 
You can provide a system prompt by setting the role to `system`, or specify that a message is the result of a [tool call](/api-guides/tool-calling) by setting the role to `tool`.', + enum: ['user', 'assistant', 'system', 'tool'], + }, + content: { + type: 'string', + }, + graph_data: { + $ref: '#/anyOf/0/properties/messages/items/graph_data', + }, + name: { + type: 'string', + }, + refusal: { + type: 'string', + }, + tool_call_id: { + type: 'string', + }, + tool_calls: { + type: 'array', + items: { + $ref: '#/anyOf/0/properties/messages/items/tool_calls/items', + }, + }, + }, + required: ['role'], + }, + }, + model: { + type: 'string', + description: + 'Specifies the model to be used for generating responses. The chat model is always `palmyra-x-004` for conversational use.', + }, + stream: { + type: 'string', + description: + 'Indicates whether the response should be streamed incrementally as it is generated or only returned once fully complete. Streaming can be useful for providing real-time feedback in interactive applications.', + enum: [true], + }, + logprobs: { + type: 'boolean', + description: 'Specifies whether to return log probabilities of the output tokens.', + }, + max_tokens: { + type: 'integer', + description: + 'Defines the maximum number of tokens (words and characters) that the model can generate in the response. The default value is set to 16, but it can be adjusted to allow for longer or shorter responses as needed.', + }, + n: { + type: 'integer', + description: + 'Specifies the number of completions (responses) to generate from the model in a single request. This parameter allows multiple responses to be generated, offering a variety of potential replies from which to choose.', + }, + stop: { + anyOf: [ + { + type: 'array', + items: { + type: 'string', + }, + }, + { + type: 'string', + }, + ], + description: + 'A token or sequence of tokens that, when generated, will cause the model to stop producing further content. 
This can be a single token or an array of tokens, acting as a signal to end the output.', + }, + stream_options: { + type: 'object', + title: 'stream_options', + description: 'Additional options for streaming.', + properties: { + include_usage: { + type: 'boolean', + description: 'Indicate whether to include usage information.', + }, + }, + required: ['include_usage'], + }, + temperature: { + type: 'number', + description: + "Controls the randomness or creativity of the model's responses. A higher temperature results in more varied and less predictable text, while a lower temperature produces more deterministic and conservative outputs.", + }, + tool_choice: { + anyOf: [ + { + $ref: '#/anyOf/0/properties/tool_choice/anyOf/0', + }, + { + $ref: '#/anyOf/0/properties/tool_choice/anyOf/1', + }, + ], + description: + 'Configure how the model will call functions: `auto` will allow the model to automatically choose the best tool, `none` disables tool calling. You can also pass a specific previously defined function.', + }, + tools: { + type: 'array', + description: + 'An array containing tool definitions for tools that the model can use to generate responses. The tool definitions use JSON schema. You can define your own functions or use one of the built-in `graph`, `llm`, or `vision` tools. Note that you can only use one built-in tool type in the array (only one of `graph`, `llm`, or `vision`).', + items: { + $ref: '#/anyOf/0/properties/tools/items', + }, + }, + top_p: { + type: 'number', + description: + 'Sets the threshold for "nucleus sampling," a technique to focus the model\'s token generation on the most likely subset of tokens. 
Only tokens with cumulative probability above this threshold are considered, controlling the trade-off between creativity and coherence.', + }, + }, + }, + ], + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.chat.chat(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/completions/create-completions.ts b/packages/mcp-server/src/tools/completions/create-completions.ts new file mode 100644 index 00000000..c9ae1e2b --- /dev/null +++ b/packages/mcp-server/src/tools/completions/create-completions.ts @@ -0,0 +1,137 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'create_completions', + description: 'Text generation', + inputSchema: { + type: 'object', + anyOf: [ + { + type: 'object', + properties: { + model: { + type: 'string', + description: 'The identifier of the model to be used for processing the request.', + }, + prompt: { + type: 'string', + description: 'The input text that the model will process to generate a response.', + }, + best_of: { + type: 'integer', + description: + 'Specifies the number of completions to generate and return the best one. Useful for generating multiple outputs and choosing the best based on some criteria.', + }, + max_tokens: { + type: 'integer', + description: 'The maximum number of tokens that the model can generate in the response.', + }, + random_seed: { + type: 'integer', + description: + 'A seed used to initialize the random number generator for the model, ensuring reproducibility of the output when the same inputs are provided.', + }, + stop: { + anyOf: [ + { + type: 'array', + items: { + type: 'string', + }, + }, + { + type: 'string', + }, + ], + description: + "Specifies stopping conditions for the model's output generation. 
This can be an array of strings or a single string that the model will look for as a signal to stop generating further tokens.", + }, + stream: { + type: 'string', + description: + "Determines whether the model's output should be streamed. If true, the output is generated and sent incrementally, which can be useful for real-time applications.", + enum: [false], + }, + temperature: { + type: 'number', + description: + "Controls the randomness of the model's outputs. Higher values lead to more random outputs, while lower values make the model more deterministic.", + }, + top_p: { + type: 'number', + description: + 'Used to control the nucleus sampling, where only the most probable tokens with a cumulative probability of top_p are considered for sampling, providing a way to fine-tune the randomness of predictions.', + }, + }, + }, + { + type: 'object', + properties: { + model: { + type: 'string', + description: 'The identifier of the model to be used for processing the request.', + }, + prompt: { + type: 'string', + description: 'The input text that the model will process to generate a response.', + }, + stream: { + type: 'string', + description: + "Determines whether the model's output should be streamed. If true, the output is generated and sent incrementally, which can be useful for real-time applications.", + enum: [true], + }, + best_of: { + type: 'integer', + description: + 'Specifies the number of completions to generate and return the best one. 
Useful for generating multiple outputs and choosing the best based on some criteria.', + }, + max_tokens: { + type: 'integer', + description: 'The maximum number of tokens that the model can generate in the response.', + }, + random_seed: { + type: 'integer', + description: + 'A seed used to initialize the random number generator for the model, ensuring reproducibility of the output when the same inputs are provided.', + }, + stop: { + anyOf: [ + { + type: 'array', + items: { + type: 'string', + }, + }, + { + type: 'string', + }, + ], + description: + "Specifies stopping conditions for the model's output generation. This can be an array of strings or a single string that the model will look for as a signal to stop generating further tokens.", + }, + temperature: { + type: 'number', + description: + "Controls the randomness of the model's outputs. Higher values lead to more random outputs, while lower values make the model more deterministic.", + }, + top_p: { + type: 'number', + description: + 'Used to control the nucleus sampling, where only the most probable tokens with a cumulative probability of top_p are considered for sampling, providing a way to fine-tune the randomness of predictions.', + }, + }, + }, + ], + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.completions.create(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/files/delete-files.ts b/packages/mcp-server/src/tools/files/delete-files.ts new file mode 100644 index 00000000..ce8d5764 --- /dev/null +++ b/packages/mcp-server/src/tools/files/delete-files.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'delete_files', + description: 'Delete file', + inputSchema: { + type: 'object', + properties: { + file_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { file_id } = args; + return client.files.delete(file_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/files/download-files.ts b/packages/mcp-server/src/tools/files/download-files.ts new file mode 100644 index 00000000..9179522b --- /dev/null +++ b/packages/mcp-server/src/tools/files/download-files.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'download_files', + description: 'Download file', + inputSchema: { + type: 'object', + properties: { + file_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { file_id } = args; + return client.files.download(file_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/files/list-files.ts b/packages/mcp-server/src/tools/files/list-files.ts new file mode 100644 index 00000000..c1a8a1a3 --- /dev/null +++ b/packages/mcp-server/src/tools/files/list-files.ts @@ -0,0 +1,52 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'list_files', + description: 'List files', + inputSchema: { + type: 'object', + properties: { + after: { + type: 'string', + description: + 'The ID of the last object in the previous page. 
This parameter instructs the API to return the next page of results.', + }, + before: { + type: 'string', + description: + 'The ID of the first object in the previous page. This parameter instructs the API to return the previous page of results.', + }, + graph_id: { + type: 'string', + description: 'The unique identifier of the graph to which the files belong.', + }, + limit: { + type: 'integer', + description: + 'Specifies the maximum number of objects returned in a page. The default value is 50. The minimum value is 1, and the maximum value is 100.', + }, + order: { + type: 'string', + description: + 'Specifies the order of the results. Valid values are asc for ascending and desc for descending.', + enum: ['asc', 'desc'], + }, + status: { + type: 'string', + description: + 'Specifies the status of the files to retrieve. Valid values are in_progress, completed or failed.', + enum: ['in_progress', 'completed', 'failed'], + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.files.list(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/files/retrieve-files.ts b/packages/mcp-server/src/tools/files/retrieve-files.ts new file mode 100644 index 00000000..e741aa2a --- /dev/null +++ b/packages/mcp-server/src/tools/files/retrieve-files.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'retrieve_files', + description: 'Retrieve file', + inputSchema: { + type: 'object', + properties: { + file_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { file_id } = args; + return client.files.retrieve(file_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/files/retry-files.ts b/packages/mcp-server/src/tools/files/retry-files.ts new file mode 100644 index 00000000..3ef13bd6 --- /dev/null +++ b/packages/mcp-server/src/tools/files/retry-files.ts @@ -0,0 +1,28 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'retry_files', + description: 'Retry failed files', + inputSchema: { + type: 'object', + properties: { + file_ids: { + type: 'array', + description: 'The unique identifier of the files to be retried.', + items: { + type: 'string', + }, + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.files.retry(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/files/upload-files.ts b/packages/mcp-server/src/tools/files/upload-files.ts new file mode 100644 index 00000000..faaf8420 --- /dev/null +++ b/packages/mcp-server/src/tools/files/upload-files.ts @@ -0,0 +1,27 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'upload_files', + description: 'Upload file', + inputSchema: { + type: 'object', + properties: { + content: { + type: 'string', + }, + 'Content-Disposition': { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.files.upload(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/add-file-to-graph-graphs.ts b/packages/mcp-server/src/tools/graphs/add-file-to-graph-graphs.ts new file mode 100644 index 00000000..0d1dc2d9 --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/add-file-to-graph-graphs.ts @@ -0,0 +1,28 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'add_file_to_graph_graphs', + description: 'Add a file to a Knowledge Graph.', + inputSchema: { + type: 'object', + properties: { + graph_id: { + type: 'string', + }, + file_id: { + type: 'string', + description: 'The unique identifier of the file.', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { graph_id, ...body } = args; + return client.graphs.addFileToGraph(graph_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/create-graphs.ts b/packages/mcp-server/src/tools/graphs/create-graphs.ts new file mode 100644 index 00000000..e39dc80a --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/create-graphs.ts @@ -0,0 +1,31 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'create_graphs', + description: 'Create a new Knowledge Graph.', + inputSchema: { + type: 'object', + properties: { + description: { + type: 'string', + description: + 'A description of the Knowledge Graph (max 255 characters). Omitting this field leaves the description unchanged.', + }, + name: { + type: 'string', + description: + 'The name of the Knowledge Graph (max 255 characters). Omitting this field leaves the name unchanged.', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.graphs.create(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/delete-graphs.ts b/packages/mcp-server/src/tools/graphs/delete-graphs.ts new file mode 100644 index 00000000..bf0c7784 --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/delete-graphs.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'delete_graphs', + description: 'Delete a Knowledge Graph.', + inputSchema: { + type: 'object', + properties: { + graph_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { graph_id } = args; + return client.graphs.delete(graph_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/list-graphs.ts b/packages/mcp-server/src/tools/graphs/list-graphs.ts new file mode 100644 index 00000000..0cbfa081 --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/list-graphs.ts @@ -0,0 +1,42 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'list_graphs', + description: 'Retrieve a list of Knowledge Graphs.', + inputSchema: { + type: 'object', + properties: { + after: { + type: 'string', + description: + 'The ID of the last object in the previous page. This parameter instructs the API to return the next page of results.', + }, + before: { + type: 'string', + description: + 'The ID of the first object in the previous page. This parameter instructs the API to return the previous page of results.', + }, + limit: { + type: 'integer', + description: + 'Specifies the maximum number of objects returned in a page. The default value is 50. The minimum value is 1, and the maximum value is 100.', + }, + order: { + type: 'string', + description: + 'Specifies the order of the results. Valid values are asc for ascending and desc for descending.', + enum: ['asc', 'desc'], + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.graphs.list(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/question-graphs.ts b/packages/mcp-server/src/tools/graphs/question-graphs.ts new file mode 100644 index 00000000..415ce9cb --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/question-graphs.ts @@ -0,0 +1,73 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'question_graphs', + description: 'Ask a question to specified Knowledge Graphs.', + inputSchema: { + type: 'object', + anyOf: [ + { + type: 'object', + properties: { + graph_ids: { + type: 'array', + description: 'The unique identifiers of the Knowledge Graphs to be queried.', + items: { + type: 'string', + }, + }, + question: { + type: 'string', + description: 'The question to be answered using the Knowledge Graph.', + }, + stream: { + type: 'string', + description: + "Determines whether the model's output should be streamed. If true, the output is generated and sent incrementally, which can be useful for real-time applications.", + enum: [false], + }, + subqueries: { + type: 'boolean', + description: 'Specify whether to include subqueries.', + }, + }, + }, + { + type: 'object', + properties: { + graph_ids: { + type: 'array', + description: 'The unique identifiers of the Knowledge Graphs to be queried.', + items: { + type: 'string', + }, + }, + question: { + type: 'string', + description: 'The question to be answered using the Knowledge Graph.', + }, + stream: { + type: 'string', + description: + "Determines whether the model's output should be streamed. 
If true, the output is generated and sent incrementally, which can be useful for real-time applications.", + enum: [true], + }, + subqueries: { + type: 'boolean', + description: 'Specify whether to include subqueries.', + }, + }, + }, + ], + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.graphs.question(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/remove-file-from-graph-graphs.ts b/packages/mcp-server/src/tools/graphs/remove-file-from-graph-graphs.ts new file mode 100644 index 00000000..702dd4c7 --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/remove-file-from-graph-graphs.ts @@ -0,0 +1,27 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'remove_file_from_graph_graphs', + description: 'Remove a file from a Knowledge Graph.', + inputSchema: { + type: 'object', + properties: { + graph_id: { + type: 'string', + }, + file_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { file_id, ...body } = args; + return client.graphs.removeFileFromGraph(file_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/retrieve-graphs.ts b/packages/mcp-server/src/tools/graphs/retrieve-graphs.ts new file mode 100644 index 00000000..a4db022c --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/retrieve-graphs.ts @@ -0,0 +1,24 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'retrieve_graphs', + description: 'Retrieve a Knowledge Graph.', + inputSchema: { + type: 'object', + properties: { + graph_id: { + type: 'string', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { graph_id } = args; + return client.graphs.retrieve(graph_id); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/graphs/update-graphs.ts b/packages/mcp-server/src/tools/graphs/update-graphs.ts new file mode 100644 index 00000000..43862d8a --- /dev/null +++ b/packages/mcp-server/src/tools/graphs/update-graphs.ts @@ -0,0 +1,34 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'update_graphs', + description: 'Update the name and description of a Knowledge Graph.', + inputSchema: { + type: 'object', + properties: { + graph_id: { + type: 'string', + }, + description: { + type: 'string', + description: + 'A description of the Knowledge Graph (max 255 characters). Omitting this field leaves the description unchanged.', + }, + name: { + type: 'string', + description: + 'The name of the Knowledge Graph (max 255 characters). Omitting this field leaves the name unchanged.', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { graph_id, ...body } = args; + return client.graphs.update(graph_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/index.ts b/packages/mcp-server/src/tools/index.ts new file mode 100644 index 00000000..6078d718 --- /dev/null +++ b/packages/mcp-server/src/tools/index.ts @@ -0,0 +1,76 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import Writer from 'writer-sdk'; +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +import retrieve_applications from './applications/retrieve-applications'; +import list_applications from './applications/list-applications'; +import generate_content_applications from './applications/generate-content-applications'; +import create_applications_jobs from './applications/jobs/create-applications-jobs'; +import retrieve_applications_jobs from './applications/jobs/retrieve-applications-jobs'; +import list_applications_jobs from './applications/jobs/list-applications-jobs'; +import retry_applications_jobs from './applications/jobs/retry-applications-jobs'; +import update_applications_graphs from './applications/graphs/update-applications-graphs'; +import list_applications_graphs from './applications/graphs/list-applications-graphs'; +import chat_chat from './chat/chat-chat'; +import create_completions from './completions/create-completions'; +import list_models from './models/list-models'; +import create_graphs from './graphs/create-graphs'; +import retrieve_graphs from './graphs/retrieve-graphs'; +import update_graphs from './graphs/update-graphs'; +import list_graphs from './graphs/list-graphs'; +import delete_graphs from './graphs/delete-graphs'; +import add_file_to_graph_graphs from './graphs/add-file-to-graph-graphs'; +import question_graphs from './graphs/question-graphs'; +import remove_file_from_graph_graphs from './graphs/remove-file-from-graph-graphs'; +import retrieve_files from './files/retrieve-files'; +import list_files from './files/list-files'; +import delete_files from './files/delete-files'; +import download_files from './files/download-files'; +import retry_files from './files/retry-files'; +import upload_files from './files/upload-files'; +import context_aware_splitting_tools from './tools/context-aware-splitting-tools'; +import parse_pdf_tools from './tools/parse-pdf-tools'; +import medical_tools_comprehend from 
'./tools/comprehend/medical-tools-comprehend'; +import analyze_vision from './vision/analyze-vision'; + +export const tools: Tool[] = []; + +export type HandlerFunction = (client: Writer, args: any) => Promise<any>; +export const handlers: Record<string, HandlerFunction> = {}; + +function addEndpoint(endpoint: { tool: Tool; handler: HandlerFunction }) { + tools.push(endpoint.tool); + handlers[endpoint.tool.name] = endpoint.handler; +} + +addEndpoint(retrieve_applications); +addEndpoint(list_applications); +addEndpoint(generate_content_applications); +addEndpoint(create_applications_jobs); +addEndpoint(retrieve_applications_jobs); +addEndpoint(list_applications_jobs); +addEndpoint(retry_applications_jobs); +addEndpoint(update_applications_graphs); +addEndpoint(list_applications_graphs); +addEndpoint(chat_chat); +addEndpoint(create_completions); +addEndpoint(list_models); +addEndpoint(create_graphs); +addEndpoint(retrieve_graphs); +addEndpoint(update_graphs); +addEndpoint(list_graphs); +addEndpoint(delete_graphs); +addEndpoint(add_file_to_graph_graphs); +addEndpoint(question_graphs); +addEndpoint(remove_file_from_graph_graphs); +addEndpoint(retrieve_files); +addEndpoint(list_files); +addEndpoint(delete_files); +addEndpoint(download_files); +addEndpoint(retry_files); +addEndpoint(upload_files); +addEndpoint(context_aware_splitting_tools); +addEndpoint(parse_pdf_tools); +addEndpoint(medical_tools_comprehend); +addEndpoint(analyze_vision); diff --git a/packages/mcp-server/src/tools/models/list-models.ts b/packages/mcp-server/src/tools/models/list-models.ts new file mode 100644 index 00000000..65221814 --- /dev/null +++ b/packages/mcp-server/src/tools/models/list-models.ts @@ -0,0 +1,20 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'list_models', + description: 'List models', + inputSchema: { + type: 'object', + properties: {}, + }, +}; + +export const handler = (client: Writer, args: any) => { + const {} = args; + return client.models.list(); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/tools/comprehend/medical-tools-comprehend.ts b/packages/mcp-server/src/tools/tools/comprehend/medical-tools-comprehend.ts new file mode 100644 index 00000000..887f300d --- /dev/null +++ b/packages/mcp-server/src/tools/tools/comprehend/medical-tools-comprehend.ts @@ -0,0 +1,33 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'medical_tools_comprehend', + description: + 'Analyze unstructured medical text to extract entities labeled with standardized medical codes and confidence scores.', + inputSchema: { + type: 'object', + properties: { + content: { + type: 'string', + description: 'The text to be analyzed.', + }, + response_type: { + type: 'string', + title: 'comprehend_medical_type', + description: + 'The structure of the response to be returned. 
`Entities` returns medical entities, `RxNorm` returns medication information, `ICD-10-CM` returns diagnosis codes, and `SNOMED CT` returns medical concepts.', + enum: ['Entities', 'RxNorm', 'ICD-10-CM', 'SNOMED CT'], + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.tools.comprehend.medical(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/tools/context-aware-splitting-tools.ts b/packages/mcp-server/src/tools/tools/context-aware-splitting-tools.ts new file mode 100644 index 00000000..7c8a51be --- /dev/null +++ b/packages/mcp-server/src/tools/tools/context-aware-splitting-tools.ts @@ -0,0 +1,33 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'context_aware_splitting_tools', + description: + 'Splits a long block of text (maximum 4000 words) into smaller chunks while preserving the semantic meaning of the text and context between the chunks.', + inputSchema: { + type: 'object', + properties: { + strategy: { + type: 'string', + title: 'splitting_strategy', + description: + 'The strategy to be used for splitting the text into chunks. 
`llm_split` uses the language model to split the text, `fast_split` uses a fast heuristic-based approach, and `hybrid_split` combines both strategies.', + enum: ['llm_split', 'fast_split', 'hybrid_split'], + }, + text: { + type: 'string', + description: 'The text to be split into chunks.', + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.tools.contextAwareSplitting(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/tools/parse-pdf-tools.ts b/packages/mcp-server/src/tools/tools/parse-pdf-tools.ts new file mode 100644 index 00000000..881e0855 --- /dev/null +++ b/packages/mcp-server/src/tools/tools/parse-pdf-tools.ts @@ -0,0 +1,30 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'parse_pdf_tools', + description: 'Parse PDF to other formats.', + inputSchema: { + type: 'object', + properties: { + file_id: { + type: 'string', + }, + format: { + type: 'string', + title: 'pdf_conversion_format', + description: 'The format into which the PDF content should be converted.', + enum: ['text', 'markdown'], + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { file_id, ...body } = args; + return client.tools.parsePdf(file_id, body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/src/tools/vision/analyze-vision.ts b/packages/mcp-server/src/tools/vision/analyze-vision.ts new file mode 100644 index 00000000..b1108bb6 --- /dev/null +++ b/packages/mcp-server/src/tools/vision/analyze-vision.ts @@ -0,0 +1,52 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Tool } from '@modelcontextprotocol/sdk/types.js'; +import Writer from 'writer-sdk'; + +export const tool: Tool = { + name: 'analyze_vision', + description: 'Submit images and a prompt to generate an analysis of the images.', + inputSchema: { + type: 'object', + properties: { + model: { + type: 'string', + description: 'The model to be used for image analysis. Currently only supports `palmyra-vision`.', + }, + prompt: { + type: 'string', + description: + 'The prompt to use for the image analysis. The prompt must include the name of each image variable, surrounded by double curly braces (`{{}}`). For example, `Describe the difference between the image {{image_1}} and the image {{image_2}}`.', + }, + variables: { + type: 'array', + items: { + type: 'object', + title: 'Vision Request File Variable', + description: + 'An array of file variables required for the analysis. The image files must be uploaded to the Writer platform before they can be used in a vision request. Learn how to upload files using the [Files API](/api-guides/api-reference/file-api/upload-files).', + properties: { + file_id: { + type: 'string', + description: + 'The File ID of the image to be analyzed. The file must be uploaded to the Writer platform before it can be used in a vision request.', + }, + name: { + type: 'string', + description: + 'The name of the file variable. You must reference this name in the prompt with double curly braces (`{{}}`). 
For example, `Describe the difference between the image {{image_1}} and the image {{image_2}}`.', + }, + }, + required: ['file_id', 'name'], + }, + }, + }, + }, +}; + +export const handler = (client: Writer, args: any) => { + const { ...body } = args; + return client.vision.analyze(body); +}; + +export default { tool, handler }; diff --git a/packages/mcp-server/tsc-multi.json b/packages/mcp-server/tsc-multi.json new file mode 100644 index 00000000..4facad5a --- /dev/null +++ b/packages/mcp-server/tsc-multi.json @@ -0,0 +1,7 @@ +{ + "targets": [ + { "extname": ".js", "module": "commonjs" }, + { "extname": ".mjs", "module": "esnext" } + ], + "projects": ["tsconfig.build.json"] +} diff --git a/packages/mcp-server/tsconfig.build.json b/packages/mcp-server/tsconfig.build.json new file mode 100644 index 00000000..c012e223 --- /dev/null +++ b/packages/mcp-server/tsconfig.build.json @@ -0,0 +1,18 @@ +{ + "extends": "./tsconfig.json", + "include": ["dist/src"], + "exclude": [], + "compilerOptions": { + "rootDir": "./dist/src", + "paths": { + "writer-sdk-mcp/*": ["src/*"], + "writer-sdk-mcp": ["src/index.ts"] + }, + "noEmit": false, + "declaration": true, + "declarationMap": true, + "outDir": "dist", + "pretty": true, + "sourceMap": true + } +} diff --git a/packages/mcp-server/tsconfig.dist-src.json b/packages/mcp-server/tsconfig.dist-src.json new file mode 100644 index 00000000..e9f2d70b --- /dev/null +++ b/packages/mcp-server/tsconfig.dist-src.json @@ -0,0 +1,11 @@ +{ + // this config is included in the published src directory to prevent TS errors + // from appearing when users go to source, and VSCode opens the source .ts file + // via declaration maps + "include": ["index.ts"], + "compilerOptions": { + "target": "es2015", + "lib": ["DOM"], + "moduleResolution": "node" + } +} diff --git a/packages/mcp-server/tsconfig.json b/packages/mcp-server/tsconfig.json new file mode 100644 index 00000000..8425e77e --- /dev/null +++ b/packages/mcp-server/tsconfig.json @@ -0,0 +1,37 @@ 
+{ + "include": ["src", "tests", "examples"], + "exclude": [], + "compilerOptions": { + "target": "es2020", + "lib": ["es2020"], + "module": "commonjs", + "moduleResolution": "node", + "esModuleInterop": true, + "baseUrl": "./", + "paths": { + "writer-sdk-mcp/*": ["src/*"], + "writer-sdk-mcp": ["src/index.ts"] + }, + "noEmit": true, + + "resolveJsonModule": true, + + "forceConsistentCasingInFileNames": true, + + "strict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "strictBindCallApply": true, + "strictPropertyInitialization": true, + "noImplicitThis": true, + "noImplicitReturns": true, + "alwaysStrict": true, + "exactOptionalPropertyTypes": true, + "noUncheckedIndexedAccess": true, + "noImplicitOverride": true, + "noPropertyAccessFromIndexSignature": true, + + "skipLibCheck": true + } +} diff --git a/release-please-config.json b/release-please-config.json index 624ed99e..b1909804 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -62,6 +62,12 @@ "release-type": "node", "extra-files": [ "src/version.ts", - "README.md" + "README.md", + "packages/mcp-server/yarn.lock", + { + "type": "json", + "path": "packages/mcp-server/package.json", + "jsonpath": "$.version" + } ] } diff --git a/scripts/build-all b/scripts/build-all new file mode 100755 index 00000000..8ac03ea6 --- /dev/null +++ b/scripts/build-all @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -exuo pipefail + +# build the core SDK package and all sub-packages + +bash ./scripts/build + +for dir in packages/*; do + if [ -d "$dir" ]; then + (cd "$dir" && yarn install && yarn build) + fi +done diff --git a/scripts/publish-packages.ts b/scripts/publish-packages.ts new file mode 100644 index 00000000..50e93fef --- /dev/null +++ b/scripts/publish-packages.ts @@ -0,0 +1,102 @@ +/** + * Called from the `create-releases.yml` workflow with the output + * of the release please action as the first argument. 
+ * + * Example JSON input: + * + * ```json + { + "releases_created": "true", + "release_created": "true", + "id": "137967744", + "name": "sdk: v0.14.5", + "tag_name": "sdk-v0.14.5", + "sha": "7cc2ba5c694e76a117f731d4cf0b06f8b8361f2e", + "body": "## 0.14.5 (2024-01-22)\n\n...", + "html_url": "https://github.com/$org/$repo/releases/tag/sdk-v0.14.5", + "draft": "false", + "upload_url": "https://uploads.github.com/repos/$org/$repo/releases/137967744/assets{?name,label}", + "path": ".", + "version": "0.14.5", + "major": "0", + "minor": "14", + "patch": "5", + "packages/additional-sdk--release_created": "true", + "packages/additional-sdk--id": "137967756", + "packages/additional-sdk--name": "additional-sdk: v0.5.2", + "packages/additional-sdk--tag_name": "additional-sdk-v0.5.2", + "packages/additional-sdk--sha": "7cc2ba5c694e76a117f731d4cf0b06f8b8361f2e", + "packages/additional-sdk--body": "## 0.5.2 (2024-01-22)\n\n...", + "packages/additional-sdk--html_url": "https://github.com/$org/$repo/releases/tag/additional-sdk-v0.5.2", + "packages/additional-sdk--draft": "false", + "packages/additional-sdk--upload_url": "https://uploads.github.com/repos/$org/$repo/releases/137967756/assets{?name,label}", + "packages/additional-sdk--path": "packages/additional-sdk", + "packages/additional-sdk--version": "0.5.2", + "packages/additional-sdk--major": "0", + "packages/additional-sdk--minor": "5", + "packages/additional-sdk--patch": "2", + "paths_released": "[\".\",\"packages/additional-sdk\"]" + } + ``` + */ + +import { execSync } from 'child_process'; +import path from 'path'; + +function main() { + const data = process.argv[2] ?? 
process.env['DATA']; + if (!data) { + throw new Error(`Usage: publish-packages.ts '{"json": "obj"}'`); + } + + const rootDir = path.join(__dirname, '..'); + console.log('root dir', rootDir); + console.log(`publish-packages called with ${data}`); + + const outputs = JSON.parse(data); + + const rawPaths = outputs.paths_released; + + if (!rawPaths) { + console.error(JSON.stringify(outputs, null, 2)); + throw new Error('Expected outputs to contain a truthy `paths_released` property'); + } + if (typeof rawPaths !== 'string') { + console.error(JSON.stringify(outputs, null, 2)); + throw new Error('Expected outputs `paths_released` property to be a JSON string'); + } + + const paths = JSON.parse(rawPaths); + if (!Array.isArray(paths)) { + console.error(JSON.stringify(outputs, null, 2)); + throw new Error('Expected outputs `paths_released` property to be an array'); + } + if (!paths.length) { + console.error(JSON.stringify(outputs, null, 2)); + throw new Error('Expected outputs `paths_released` property to contain at least one entry'); + } + + const publishScriptPath = path.join(rootDir, 'bin', 'publish-npm'); + console.log('Using publish script at', publishScriptPath); + + console.log('Ensuring root package is built'); + console.log(`$ yarn build`); + execSync(`yarn build`, { cwd: rootDir, encoding: 'utf8', stdio: 'inherit' }); + + for (const relPackagePath of paths) { + console.log('\n'); + + const packagePath = path.join(rootDir, relPackagePath); + console.log(`Publishing in directory: ${packagePath}`); + + console.log(`$ yarn install`); + execSync(`yarn install`, { cwd: packagePath, encoding: 'utf8', stdio: 'inherit' }); + + console.log(`$ bash ${publishScriptPath}`); + execSync(`bash ${publishScriptPath}`, { cwd: packagePath, encoding: 'utf8', stdio: 'inherit' }); + } + + console.log('Finished publishing packages'); +} + +main(); diff --git a/scripts/utils/make-dist-package-json.cjs b/scripts/utils/make-dist-package-json.cjs index 7c24f56e..4d6634ea 100644 --- 
a/scripts/utils/make-dist-package-json.cjs +++ b/scripts/utils/make-dist-package-json.cjs @@ -12,6 +12,14 @@ processExportMap(pkgJson.exports); for (const key of ['types', 'main', 'module']) { if (typeof pkgJson[key] === 'string') pkgJson[key] = pkgJson[key].replace(/^(\.\/)?dist\//, './'); } +// Fix bin paths if present +if (pkgJson.bin) { + for (const key in pkgJson.bin) { + if (typeof pkgJson.bin[key] === 'string') { + pkgJson.bin[key] = pkgJson.bin[key].replace(/^(\.\/)?dist\//, './'); + } + } +} delete pkgJson.devDependencies; delete pkgJson.scripts.prepack;