Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/prompt-caching.md
Original file line number Diff line number Diff line change
Expand Up @@ -188,3 +188,4 @@ The `cache_discount` field shows how much you saved. Some providers (like Anthro
See ecosystem-specific examples:

- **TypeScript + fetch**: [typescript/fetch/src/prompt-caching/](../typescript/fetch/src/prompt-caching/)
- **AI SDK v5** (Vercel): [typescript/ai-sdk-v5/src/prompt-caching/](../typescript/ai-sdk-v5/src/prompt-caching/)
40 changes: 40 additions & 0 deletions typescript/ai-sdk-v5/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# AI SDK v5 Examples

Examples using Vercel AI SDK v5 with the `@openrouter/ai-sdk-provider` package.

## Prerequisites

- Bun runtime: `curl -fsSL https://bun.sh/install | bash`
- `OPENROUTER_API_KEY` environment variable

## Running Examples

```bash
# From monorepo root (typescript/)
bun examples

# Or from this workspace
cd ai-sdk-v5
bun examples
```

## Features

- [prompt-caching](./src/prompt-caching/) - Anthropic caching examples with AI SDK v5

### Key Configuration

**CRITICAL**: The AI SDK example requires:
```typescript
extraBody: {
stream_options: { include_usage: true }
}
```

Without this, usage details (including cached_tokens) are not populated in the response.

## Dependencies

- `@openrouter-examples/shared` - Shared constants (LARGE_SYSTEM_PROMPT) and types
- `@openrouter/ai-sdk-provider` - OpenRouter provider for AI SDK
- `ai` v5.x - Vercel AI SDK
18 changes: 18 additions & 0 deletions typescript/ai-sdk-v5/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
{
"name": "@openrouter-examples/ai-sdk-v5",
"version": "1.0.0",
"private": true,
"type": "module",
"scripts": {
"examples": "bun run run-examples.ts",
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@openrouter-examples/shared": "workspace:*",
"@openrouter/ai-sdk-provider": "1.2.2",
"ai": "^5.0.92"
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

freeze deps

},
"devDependencies": {
"@types/bun": "latest"
}
}
57 changes: 57 additions & 0 deletions typescript/ai-sdk-v5/run-examples.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
#!/usr/bin/env bun
/**
* Run all example files in the src/ directory
* Each example is run in a separate process to handle process.exit() calls
*/

import { readdirSync, statSync } from 'fs';
import { join } from 'path';
import { $ } from 'bun';

const srcDir = join(import.meta.dir, 'src');

// Recursively find all .ts files in src/
/**
 * Recursively find all runnable .ts example files under `dir`.
 *
 * Uses `readdirSync(dir, { withFileTypes: true })` so each entry's kind is
 * known from the directory read itself, avoiding one `statSync` syscall per
 * entry. Skips `.d.ts` declaration files, which contain no runnable code.
 *
 * @param dir - absolute directory to scan
 * @returns sorted list of absolute paths to .ts files
 */
function findExamples(dir: string): string[] {
  const files: string[] = [];

  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    const fullPath = join(dir, entry.name);

    if (entry.isDirectory()) {
      files.push(...findExamples(fullPath));
    } else if (entry.name.endsWith('.ts') && !entry.name.endsWith('.d.ts')) {
      files.push(fullPath);
    }
  }

  // Sort for a deterministic run order across platforms/filesystems.
  return files.sort();
}

// Discover every example, run each in its own child process (so an example's
// process.exit() cannot kill this runner), and report a pass/fail summary.
const examples = findExamples(srcDir);
console.log(`Found ${examples.length} example(s)\n`);

const divider = '='.repeat(80);
let failures = 0;

for (const examplePath of examples) {
  const rel = examplePath.replace(import.meta.dir + '/', '');
  console.log(`\n${divider}`);
  console.log(`Running: ${rel}`);
  console.log(divider);

  try {
    // .quiet() suppresses the child's stdout/stderr; only our summary prints.
    await $`bun run ${examplePath}`.quiet();
    console.log(`✅ ${rel} completed successfully`);
  } catch {
    console.error(`❌ ${rel} failed`);
    failures += 1;
  }
}

console.log(`\n${divider}`);
console.log(`Results: ${examples.length - failures}/${examples.length} passed`);
console.log(divider);

// Non-zero exit so CI marks the run as failed when any example failed.
if (failures > 0) {
  process.exit(1);
}
12 changes: 12 additions & 0 deletions typescript/ai-sdk-v5/src/prompt-caching/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Prompt Caching Examples (AI SDK v5)

Examples demonstrating prompt caching with Vercel AI SDK v5.

## Documentation

For full prompt caching documentation including all providers, pricing, and configuration details, see:
- **[Prompt Caching Guide](../../../../docs/prompt-caching.md)**

## Examples in This Directory

See the TypeScript files in this directory for specific examples with complete working code.
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
/**
* Example: Anthropic Prompt Caching - Multi-Message Conversation (AI SDK v5)
*
* This example demonstrates Anthropic prompt caching in a multi-message conversation
* via OpenRouter using Vercel AI SDK v5.
*
* Pattern: User message cache in multi-turn conversation
* - Cache large context in first user message
* - Cache persists through conversation history
*/

import { LARGE_SYSTEM_PROMPT } from '@openrouter-examples/shared/constants';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { generateText } from 'ai';

// OpenRouter provider for the AI SDK.
// NOTE: stream_options.include_usage is required here — per this package's
// README, usage details (including cached_tokens) are not populated in the
// response without it, and this example reads cached_tokens to verify caching.
const openrouter = createOpenRouter({
  apiKey: process.env.OPENROUTER_API_KEY,
  extraBody: {
    stream_options: { include_usage: true },
  },
});

/** The `messages` array type accepted by `generateText`. */
type Messages = Parameters<typeof generateText>[0]['messages'];

/**
 * Build the multi-turn conversation shared by both requests.
 *
 * The first user part carries `cacheControl: { type: 'ephemeral' }` so the
 * large context is cached by Anthropic. Both calls MUST send byte-identical
 * history for the second call to hit the cache — building it in one place
 * (instead of duplicating the literal at each call site) guarantees that.
 *
 * @param largeContext - the large (cacheable) context for the first message
 */
function buildConversation(largeContext: string): Messages {
  return [
    {
      role: 'user',
      content: [
        {
          type: 'text',
          text: largeContext,
          providerOptions: {
            openrouter: {
              cacheControl: { type: 'ephemeral' },
            },
          },
        },
        {
          type: 'text',
          text: "Hello, what's your purpose?",
        },
      ],
    },
    {
      role: 'assistant',
      content: "I'm an AI assistant designed to help with various tasks.",
    },
    {
      role: 'user',
      content: 'What programming languages do you know?',
    },
  ];
}

/**
 * Extract `cached_tokens` from the OpenRouter provider metadata, defaulting
 * to 0 when usage details are absent.
 *
 * FIXME: providerMetadata.openrouter.usage should have proper type with promptTokensDetails
 */
function readCachedTokens(result: Awaited<ReturnType<typeof generateText>>): number {
  // @ts-expect-error - usage is typed as JSONValue but should be OpenRouterUsage
  return result.providerMetadata?.openrouter?.usage?.promptTokensDetails?.cachedTokens ?? 0;
}

/**
 * Make the same request twice and compare cached_tokens: the first call is
 * expected to miss the cache (0) and the second to hit it (>0).
 * Exits with code 1 on request failure.
 */
async function main() {
  console.log('╔════════════════════════════════════════════════════════════════════════════╗');
  console.log('║ Anthropic Prompt Caching - Multi-Message (AI SDK v5) ║');
  console.log('╚════════════════════════════════════════════════════════════════════════════╝');
  console.log();
  console.log('Testing cache_control in multi-turn conversation');
  console.log();

  try {
    // Unique testId keeps this run's context distinct from prior runs' caches.
    const testId = Date.now();
    const model = openrouter('anthropic/claude-3-5-sonnet');
    const largeContext = `Test ${testId}: Context:\n\n${LARGE_SYSTEM_PROMPT}`;
    const messages = buildConversation(largeContext);

    // First call with conversation history
    console.log('First Call (Cache Miss Expected)');
    const result1 = await generateText({ model, messages });
    const cached1 = readCachedTokens(result1);
    console.log(` Response: ${result1.text.substring(0, 80)}...`);
    console.log(` cached_tokens=${cached1}`);

    // Brief pause so the cache write from the first call settles.
    await new Promise((resolve) => setTimeout(resolve, 1000));

    // Second identical call - should hit cache
    console.log('\nSecond Call (Cache Hit Expected)');
    const result2 = await generateText({ model, messages });
    const cached2 = readCachedTokens(result2);
    console.log(` Response: ${result2.text.substring(0, 80)}...`);
    console.log(` cached_tokens=${cached2}`);

    // Analysis
    console.log('\n' + '='.repeat(80));
    console.log('ANALYSIS');
    console.log('='.repeat(80));
    console.log(`First call: cached_tokens=${cached1} (expected: 0)`);
    console.log(`Second call: cached_tokens=${cached2} (expected: >0)`);

    const success = cached1 === 0 && cached2 > 0;
    console.log(`\nResult: ${success ? '✓ CACHE WORKING' : '✗ CACHE NOT WORKING'}`);

    if (success) {
      console.log('\n✓ SUCCESS - Multi-message caching is working correctly');
    } else {
      console.log('\n✗ FAILURE - Multi-message caching is not working as expected');
    }
  } catch (error) {
    console.error('\n❌ ERROR:', error);
    process.exit(1);
  }
}

main();
Loading