feat: allow to override API (fixes #1)
lucgagan committed Jun 11, 2023
1 parent 125e20b commit 2b1a1d6
Showing 2 changed files with 40 additions and 21 deletions.
README.md: 15 changes (15 additions, 0 deletions)
@@ -9,6 +9,7 @@ This SDK makes it simple to:
- [save and restore chat conversations](#resuming-conversations)
- [stream chat responses](#streaming-conversations)
- [cancel chat responses](#cancelling-responses)
- [override the API endpoint](#overriding-api)

## Usage

@@ -129,6 +130,20 @@ try {
}
```

### Overriding API

If you want to use the `completions` library against another API endpoint that is compatible with the official OpenAI API, you can do so by passing the `apiUrl` parameter:

```ts
import { createChat } from "completions";

const chat = createChat({
  apiKey: "",
  apiUrl: "https://ray.run/api/completions",
  model: "gpt-3.5-turbo",
});
```
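
With the override in place, the rest of the chat API works the same as with the default endpoint. As a minimal illustration (assuming the `sendMessage` method documented in the Usage section above), requests now go to `https://ray.run/api/completions` instead of the OpenAI endpoint:

```ts
// Illustrative sketch only; `chat` is the instance created above with `apiUrl`.
const response = await chat.sendMessage("Ping");

console.log(response);
```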

## My other projects

- [Developer Utilities](https://ray.run/tools)
src/createCompletions.ts: 46 changes (25 additions, 21 deletions)
@@ -68,6 +68,7 @@ const ResponseChunkZodSchema = z
*/
const CompletionsOptionsZodSchema = z
  .object({
    apiUrl: z.string().optional(),
    onMessage: z
      .function()
      .args(
@@ -114,27 +115,30 @@ export type CompletionResponse = z.infer<typeof CompletionResponseZodSchema>;
export const createCompletions = async (
  options: CompletionsOptions
): Promise<CompletionResponse> => {
  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    headers: {
      Authorization: `Bearer ${options.apiKey}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      messages: options.messages,
      model: options.model,
      stream: true,
      temperature: options.temperature,
      top_p: options.topP,
      n: options.n,
      stop: options.stop,
      frequency_penalty: options.frequencyPenalty,
      presence_penalty: options.presencePenalty,
      logit_bias: options.logitBias,
      max_tokens: options.maxTokens,
      user: options.user,
    }),
    method: "POST",
  });
  const response = await fetch(
    options.apiUrl ?? "https://api.openai.com/v1/chat/completions",
    {
      headers: {
        Authorization: `Bearer ${options.apiKey}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        messages: options.messages,
        model: options.model,
        stream: true,
        temperature: options.temperature,
        top_p: options.topP,
        n: options.n,
        stop: options.stop,
        frequency_penalty: options.frequencyPenalty,
        presence_penalty: options.presencePenalty,
        logit_bias: options.logitBias,
        max_tokens: options.maxTokens,
        user: options.user,
      }),
      method: "POST",
    }
  );

  if (!response.body) {
    throw new Error("Expected response to have a body");
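
For context, a small standalone sketch of the fallback behaviour introduced above: when `apiUrl` is omitted the request still goes to the official OpenAI endpoint, otherwise the override wins. The `resolveApiUrl` helper below is hypothetical and only mirrors the `options.apiUrl ?? ...` expression; it is not part of the commit.

```ts
// Hypothetical helper, for illustration only.
const resolveApiUrl = (apiUrl?: string): string =>
  apiUrl ?? "https://api.openai.com/v1/chat/completions";

resolveApiUrl(); // "https://api.openai.com/v1/chat/completions"
resolveApiUrl("https://ray.run/api/completions"); // "https://ray.run/api/completions"
```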
