76 changes: 76 additions & 0 deletions mcp/schema_cache.go
@@ -0,0 +1,76 @@
// Copyright 2025 The Go MCP SDK Authors. All rights reserved.
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.

package mcp

import (
"reflect"
"sync"

"github.com/google/jsonschema-go/jsonschema"
)

// schemaCache provides concurrent-safe caching for JSON schemas.
// It caches both by reflect.Type (for auto-generated schemas) and
// by schema pointer (for pre-defined schemas).
//
// This cache significantly improves performance for stateless server deployments
// where tools are re-registered on every request. Without caching, each AddTool
// call would trigger expensive reflection-based schema generation and resolution.
type schemaCache struct {
// byType caches schemas generated from Go types via jsonschema.ForType.
// Key: reflect.Type, Value: *cachedSchema
byType sync.Map

// bySchema caches resolved schemas for pre-defined Schema objects.
// Key: *jsonschema.Schema (pointer identity), Value: *jsonschema.Resolved
// This uses pointer identity because integrators typically reuse the same
// Tool objects across requests, so the schema pointer remains stable (a short
// sketch of this behavior follows the struct definition).
bySchema sync.Map
}
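
// The pointer-identity caching of bySchema means a hit depends on reusing the
// same *jsonschema.Schema value, not on structural equality. A rough sketch of
// the intended behavior (illustrative only; "someResolved" stands in for a
// previously computed *jsonschema.Resolved and is not part of this file):
//
//	shared := &jsonschema.Schema{Type: "object"}
//	globalSchemaCache.setBySchema(shared, someResolved)
//
//	_, ok := globalSchemaCache.getBySchema(shared)
//	// ok == true: same pointer as the cached key.
//
//	_, ok = globalSchemaCache.getBySchema(&jsonschema.Schema{Type: "object"})
//	// ok == false: structurally equal but a different pointer, so the caller
//	// falls back to resolving the schema again.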

// cachedSchema holds both the generated schema and its resolved form.
type cachedSchema struct {
schema *jsonschema.Schema
resolved *jsonschema.Resolved
}

// globalSchemaCache is the package-level cache used by setSchema.
// It is unbounded since typical MCP servers have <100 tools.
var globalSchemaCache = &schemaCache{}
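
// For reference, the lookup/populate flow a caller such as setSchema is
// expected to follow looks roughly like this (a hedged sketch; t is a
// reflect.Type, genSchema and resolve stand in for whatever generation and
// resolution helpers the SDK actually uses, and error handling is elided):
//
//	if schema, resolved, ok := globalSchemaCache.getByType(t); ok {
//		return schema, resolved, nil // cache hit: skip reflection entirely
//	}
//	schema := genSchema(t)      // expensive reflection-based generation
//	resolved := resolve(schema) // expensive resolution
//	globalSchemaCache.setByType(t, schema, resolved)
//	return schema, resolved, nil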

// getByType retrieves a cached schema by Go type.
// It returns the schema, its resolved form, and whether the lookup was a cache hit.
func (c *schemaCache) getByType(t reflect.Type) (*jsonschema.Schema, *jsonschema.Resolved, bool) {
if v, ok := c.byType.Load(t); ok {
cs := v.(*cachedSchema)
return cs.schema, cs.resolved, true
}
return nil, nil, false
}

// setByType caches a schema by Go type.
func (c *schemaCache) setByType(t reflect.Type, schema *jsonschema.Schema, resolved *jsonschema.Resolved) {
c.byType.Store(t, &cachedSchema{schema: schema, resolved: resolved})
}

// getBySchema retrieves a cached resolved schema by the original schema pointer.
// This is used when integrators provide pre-defined schemas (e.g., the github-mcp-server pattern).
func (c *schemaCache) getBySchema(schema *jsonschema.Schema) (*jsonschema.Resolved, bool) {
if v, ok := c.bySchema.Load(schema); ok {
return v.(*jsonschema.Resolved), true
}
return nil, false
}

// setBySchema caches a resolved schema by the original schema pointer.
func (c *schemaCache) setBySchema(schema *jsonschema.Schema, resolved *jsonschema.Resolved) {
c.bySchema.Store(schema, resolved)
}

// resetForTesting clears the cache. Only for use in tests.
func (c *schemaCache) resetForTesting() {
c.byType.Clear()
c.bySchema.Clear()
}
161 changes: 161 additions & 0 deletions mcp/schema_cache_benchmark_test.go
@@ -0,0 +1,161 @@
// Copyright 2025 The Go MCP SDK Authors. All rights reserved.
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.

package mcp

import (
"context"
"testing"

"github.com/google/jsonschema-go/jsonschema"
)

// BenchmarkAddToolTypedHandler measures performance of AddTool with typed handlers.
// This simulates the stateless server pattern where new servers are created per request.
func BenchmarkAddToolTypedHandler(b *testing.B) {
type SearchInput struct {
Query string `json:"query" jsonschema:"required"`
Page int `json:"page"`
PerPage int `json:"per_page"`
}

type SearchOutput struct {
Results []string `json:"results"`
Total int `json:"total"`
}

handler := func(ctx context.Context, req *CallToolRequest, in SearchInput) (*CallToolResult, SearchOutput, error) {
return &CallToolResult{}, SearchOutput{}, nil
}

tool := &Tool{
Name: "search",
Description: "Search for items",
}

// Reset cache to simulate cold start for first iteration
globalSchemaCache.resetForTesting()

b.ResetTimer()
b.ReportAllocs()

for i := 0; i < b.N; i++ {
s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
AddTool(s, tool, handler)
}
}
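
// For context, the "stateless server" deployment the benchmark above models
// re-registers every tool on each incoming request, roughly like this
// (a hedged sketch; the HTTP wiring, tool, and handler names are placeholders
// and not part of this change):
//
//	func handle(w http.ResponseWriter, r *http.Request) {
//		s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
//		AddTool(s, tool, handler) // repeated for every tool, on every request
//		// ... connect s to the request via the chosen transport ...
//	}
//
// With the package-level schema cache, only the first request pays the
// reflection and resolution cost; later registrations are map lookups.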

// BenchmarkAddToolPreDefinedSchema measures performance with pre-defined schemas.
// This simulates how github-mcp-server registers tools with manual InputSchema.
func BenchmarkAddToolPreDefinedSchema(b *testing.B) {
schema := &jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"query": {Type: "string", Description: "Search query"},
"page": {Type: "integer", Description: "Page number"},
"per_page": {Type: "integer", Description: "Results per page"},
},
Required: []string{"query"},
}

handler := func(ctx context.Context, req *CallToolRequest) (*CallToolResult, error) {
return &CallToolResult{}, nil
}

tool := &Tool{
Name: "search",
Description: "Search for items",
InputSchema: schema, // Pre-defined schema like github-mcp-server
}

// Reset cache to simulate cold start for first iteration
globalSchemaCache.resetForTesting()

b.ResetTimer()
b.ReportAllocs()

for i := 0; i < b.N; i++ {
s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
s.AddTool(tool, handler)
}
}

// BenchmarkAddToolTypedHandlerNoCache measures performance without caching.
// Used to compare before/after performance.
func BenchmarkAddToolTypedHandlerNoCache(b *testing.B) {
type SearchInput struct {
Query string `json:"query" jsonschema:"required"`
Page int `json:"page"`
PerPage int `json:"per_page"`
}

type SearchOutput struct {
Results []string `json:"results"`
Total int `json:"total"`
}

handler := func(ctx context.Context, req *CallToolRequest, in SearchInput) (*CallToolResult, SearchOutput, error) {
return &CallToolResult{}, SearchOutput{}, nil
}

tool := &Tool{
Name: "search",
Description: "Search for items",
}

b.ResetTimer()
b.ReportAllocs()

for i := 0; i < b.N; i++ {
// Reset cache every iteration to simulate no caching
globalSchemaCache.resetForTesting()

s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
AddTool(s, tool, handler)
}
}

// BenchmarkAddToolMultipleTools simulates registering multiple tools like github-mcp-server.
func BenchmarkAddToolMultipleTools(b *testing.B) {
type Input1 struct {
Query string `json:"query"`
}
type Input2 struct {
ID int `json:"id"`
}
type Input3 struct {
Name string `json:"name"`
Value string `json:"value"`
}
type Output struct {
Success bool `json:"success"`
}

handler1 := func(ctx context.Context, req *CallToolRequest, in Input1) (*CallToolResult, Output, error) {
return &CallToolResult{}, Output{}, nil
}
handler2 := func(ctx context.Context, req *CallToolRequest, in Input2) (*CallToolResult, Output, error) {
return &CallToolResult{}, Output{}, nil
}
handler3 := func(ctx context.Context, req *CallToolRequest, in Input3) (*CallToolResult, Output, error) {
return &CallToolResult{}, Output{}, nil
}

tool1 := &Tool{Name: "tool1", Description: "Tool 1"}
tool2 := &Tool{Name: "tool2", Description: "Tool 2"}
tool3 := &Tool{Name: "tool3", Description: "Tool 3"}

// Reset cache before benchmark
globalSchemaCache.resetForTesting()

b.ResetTimer()
b.ReportAllocs()

for i := 0; i < b.N; i++ {
s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
AddTool(s, tool1, handler1)
AddTool(s, tool2, handler2)
AddTool(s, tool3, handler3)
}
}