Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,7 @@
"@ai-sdk/openai-compatible": "^1.0.7",
"@ai-sdk/provider": "^2.0.0",
"ai": "^5.0.14",
"ai-sdk-mistral-fim": "^0.0.1",
"ai-sdk-ollama": "^0.5.0",
"uuid": "^11.1.0"
}
Expand Down
7 changes: 0 additions & 7 deletions src/autocomplete/context.ts

This file was deleted.

114 changes: 114 additions & 0 deletions src/autocomplete/defaultHoleFiller.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
import { HoleFiller, PromptArgs, AutoCompleteContext } from "./holeFiller";

// Source: continue/core/autocomplete/templating/AutocompleteTemplate.ts (holeFillerTemplate)
export class DefaultHoleFiller implements HoleFiller {
  /**
   * Few-shot system prompt instructing the model to act as a "hole filler":
   * given a document containing a {{FILL_HERE}} marker, answer with only the
   * replacement text wrapped in a <COMPLETION> tag. Kept verbatim from the
   * referenced upstream source.
   */
  systemPrompt(): string {
    // From https://github.com/VictorTaelin/AI-scripts
    return `You are a HOLE FILLER. You are provided with a file containing holes, formatted as '{{HOLE_NAME}}'.
Your TASK is to complete with a string to replace this hole with, inside a <COMPLETION/> XML tag, including context-aware indentation, if needed.
All completions MUST be truthful, accurate, well-written and correct.
## EXAMPLE QUERY:

<QUERY>
function sum_evens(lim) {
var sum = 0;
for (var i = 0; i < lim; ++i) {
{{FILL_HERE}}
}
return sum;
}
</QUERY>

TASK: Fill the {{FILL_HERE}} hole.

## CORRECT COMPLETION

<COMPLETION>if (i % 2 === 0) {
sum += i;
}</COMPLETION>

## EXAMPLE QUERY:

<QUERY>
def sum_list(lst):
total = 0
for x in lst:
{{FILL_HERE}}
return total

print sum_list([1, 2, 3])
</QUERY>

## CORRECT COMPLETION:

<COMPLETION> total += x</COMPLETION>

## EXAMPLE QUERY:

<QUERY>
// data Tree a = Node (Tree a) (Tree a) | Leaf a

// sum :: Tree Int -> Int
// sum (Node lft rgt) = sum lft + sum rgt
// sum (Leaf val) = val

// convert to TypeScript:
{{FILL_HERE}}
</QUERY>

## CORRECT COMPLETION:

<COMPLETION>type Tree<T>
= {$:"Node", lft: Tree<T>, rgt: Tree<T>}
| {$:"Leaf", val: T};

function sum(tree: Tree<number>): number {
switch (tree.$) {
case "Node":
return sum(tree.lft) + sum(tree.rgt);
case "Leaf":
return tree.val;
}
}</COMPLETION>

## EXAMPLE QUERY:

The 5th {{FILL_HERE}} is Jupiter.

## CORRECT COMPLETION:

<COMPLETION>planet from the Sun</COMPLETION>

## EXAMPLE QUERY:

function hypothenuse(a, b) {
return Math.sqrt({{FILL_HERE}}b ** 2);
}

## CORRECT COMPLETION:

<COMPLETION>a ** 2 + </COMPLETION>
`;
  }

  /**
   * Builds the user message: optional filename/language context comment lines,
   * then the document split around the cursor with a {{FILL_HERE}} marker in
   * between, wrapped in <QUERY> tags.
   */
  userPrompt(ctx: AutoCompleteContext): string {
    let context = '';
    // Bug fix: `filename`/`language` are optional on AutoCompleteContext, so
    // the previous `!== ''` guards let `undefined` through and interpolated
    // the literal string "undefined" into the prompt. Truthiness covers both
    // the undefined and the empty-string cases.
    if (ctx.filename) {
      context += `// Filename: "${ctx.filename}" \n`;
    }
    if (ctx.language) {
      context += `// Programming language: "${ctx.language}" \n`;
    }
    return `${context}<QUERY>\n${ctx.textBeforeCursor}{{FILL_HERE}}${ctx.textAfterCursor}\n</QUERY>\nTASK: Fill the {{FILL_HERE}} hole. Answer only with the CORRECT completion, and NOTHING ELSE. Do it now.\n<COMPLETION>`;
  }

  /** Assembles the chat-style (system + user) prompt expected by the AI SDK. */
  prompt(params: AutoCompleteContext): PromptArgs {
    return {
      messages: [
        { role: "system", content: this.systemPrompt() },
        { role: "user", content: this.userPrompt(params) },
      ],
    };
  }
}

120 changes: 14 additions & 106 deletions src/autocomplete/holeFiller.ts
Original file line number Diff line number Diff line change
@@ -1,110 +1,18 @@
import { type AutoCompleteContext } from "./context";
import { Prompt } from 'ai';
import { ProviderOptions } from '@ai-sdk/provider-utils';

export interface HoleFiller {
systemPrompt(): string
userPrompt(params: AutoCompleteContext): string
}

// Source: continue/core/autocomplete/templating/AutocompleteTemplate.ts (holeFillerTemplate)
export class DefaultHoleFiller implements HoleFiller {
systemPrompt(): string {
// From https://github.com/VictorTaelin/AI-scripts
return `You are a HOLE FILLER. You are provided with a file containing holes, formatted as '{{HOLE_NAME}}'.
Your TASK is to complete with a string to replace this hole with, inside a <COMPLETION/> XML tag, including context-aware indentation, if needed.
All completions MUST be truthful, accurate, well-written and correct.
## EXAMPLE QUERY:

<QUERY>
function sum_evens(lim) {
var sum = 0;
for (var i = 0; i < lim; ++i) {
{{FILL_HERE}}
}
return sum;
}
</QUERY>

TASK: Fill the {{FILL_HERE}} hole.

## CORRECT COMPLETION

<COMPLETION>if (i % 2 === 0) {
sum += i;
}</COMPLETION>

## EXAMPLE QUERY:

<QUERY>
def sum_list(lst):
total = 0
for x in lst:
{{FILL_HERE}}
return total

print sum_list([1, 2, 3])
</QUERY>

## CORRECT COMPLETION:

<COMPLETION> total += x</COMPLETION>

## EXAMPLE QUERY:

<QUERY>
// data Tree a = Node (Tree a) (Tree a) | Leaf a

// sum :: Tree Int -> Int
// sum (Node lft rgt) = sum lft + sum rgt
// sum (Leaf val) = val

// convert to TypeScript:
{{FILL_HERE}}
</QUERY>

## CORRECT COMPLETION:

<COMPLETION>type Tree<T>
= {$:"Node", lft: Tree<T>, rgt: Tree<T>}
| {$:"Leaf", val: T};

function sum(tree: Tree<number>): number {
switch (tree.$) {
case "Node":
return sum(tree.lft) + sum(tree.rgt);
case "Leaf":
return tree.val;
}
}</COMPLETION>

## EXAMPLE QUERY:

The 5th {{FILL_HERE}} is Jupiter.

## CORRECT COMPLETION:

<COMPLETION>planet from the Sun</COMPLETION>

## EXAMPLE QUERY:

function hypothenuse(a, b) {
return Math.sqrt({{FILL_HERE}}b ** 2);
}

## CORRECT COMPLETION:

<COMPLETION>a ** 2 + </COMPLETION>
`;
}

userPrompt(ctx: AutoCompleteContext): string {
let context = '';
if (ctx.filename !== '') {
context += `// Filename: "${ctx.filename}" \n`;
}
if (ctx.language !== '') {
context += `// Programming language: "${ctx.language}" \n`;
}
return `${context}<QUERY>\n${ctx.textBeforeCursor}{{FILL_HERE}}${ctx.textAfterCursor}\n</QUERY>\nTASK: Fill the {{FILL_HERE}} hole. Answer only with the CORRECT completion, and NOTHING ELSE. Do it now.\n<COMPLETION>`;
}
prompt(params: AutoCompleteContext): PromptArgs
}

// Arguments handed to the language model: the AI SDK `Prompt` (messages or a
// raw prompt string) plus optional provider-specific options — e.g. the
// MistralFimHoleFiller passes the suffix under the 'mistral.fim' key.
export type PromptArgs = Prompt & {
providerOptions?: ProviderOptions;
};

// Editor state captured at the moment an autocompletion is requested.
export type AutoCompleteContext = {
textBeforeCursor: string,   // document text preceding the cursor
textAfterCursor: string,    // document text following the cursor
currentLineText: string,    // presumably the full text of the cursor's line — TODO confirm against caller
filename?: string,          // may be absent (e.g. unsaved buffer) — callers must not assume it is set
language?: string,          // editor language identifier, e.g. "typescript"; optional like `filename`
}
3 changes: 3 additions & 0 deletions src/autocomplete/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Barrel file re-exporting the autocomplete hole-filler implementations and
// their shared interface/types.
export * from './defaultHoleFiller';
export * from './mistralfimHoleFiller';
export * from './holeFiller';
14 changes: 14 additions & 0 deletions src/autocomplete/mistralfimHoleFiller.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import { HoleFiller, PromptArgs, AutoCompleteContext } from "./holeFiller";

/**
 * Hole filler backed by Mistral's native fill-in-the-middle endpoint.
 * Instead of a chat-style system/user prompt, it sends the text before the
 * cursor as the raw prompt and passes the text after the cursor to the
 * provider as the FIM suffix.
 */
export class MistralFimHoleFiller implements HoleFiller {
  prompt(params: AutoCompleteContext): PromptArgs {
    const fimOptions = { suffix: params.textAfterCursor };
    const args: PromptArgs = {
      prompt: params.textBeforeCursor,
      providerOptions: { 'mistral.fim': fimOptions },
    };
    return args;
  }
}
24 changes: 24 additions & 0 deletions src/providers/codestral.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import { ProfileWithAPIKey, ProviderConnection, Model } from "../types";
import { type LanguageModelV2 } from "@ai-sdk/provider";
import { LanguageModelProvider } from "./providers";
import { createMistralFim } from 'ai-sdk-mistral-fim';

/**
 * Provider for Mistral's Codestral model, wired through the
 * `ai-sdk-mistral-fim` package so completions use the fill-in-the-middle
 * endpoint rather than the chat endpoint.
 */
export class CodestralProvider implements LanguageModelProvider {
  /** Instantiates a FIM-capable language model from the profile's credentials. */
  languageModel(profile: ProfileWithAPIKey): LanguageModelV2 {
    return createMistralFim({
      baseURL: profile.baseURL,
      apiKey: profile.apiKey,
    })(profile.modelId);
  }

  /**
   * Returns the static model list; the connection is unused because Codestral
   * exposes a single known model id here rather than a discovery endpoint.
   * (Fixed the `new Promise((resolve) => resolve(...))` anti-pattern: an
   * `async` method already wraps its return value in a Promise.)
   */
  async listModels(_conn: ProviderConnection): Promise<Model[]> {
    return [
      {
        id: 'codestral-latest',
        name: 'codestral-latest',
      },
    ];
  }
}
11 changes: 8 additions & 3 deletions src/providers/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { ProfileWithAPIKey, Provider, ProviderConnection, ProviderID, Model } fr
import { OpenAICompatibleProvider } from "./openaiCompatible";
import { OllamaProvider } from "./ollama";
import { type LanguageModelV2 } from "@ai-sdk/provider";
import { CodestralProvider } from "./codestral";

export interface LanguageModelProvider {
languageModel(profile: ProfileWithAPIKey): LanguageModelV2
Expand All @@ -17,10 +18,11 @@ function languageModelProvider(providerId: ProviderID): LanguageModelProvider {
case 'groq':
case 'openai-compatible':
case 'mistral':
case 'mistral-codestral': // TODO: we should support FIM endpoint.
return new OpenAICompatibleProvider();
return new OpenAICompatibleProvider();
case 'ollama':
return new OllamaProvider();
return new OllamaProvider();
case 'mistral-codestral':
return new CodestralProvider();
default:
throw new Error(`Unsupported provider: ${providerId}`);
}
Expand All @@ -34,6 +36,9 @@ export function getLanguageModelFromProfile(profile: ProfileWithAPIKey): Languag
return languageModelProvider(profile.provider).languageModel(profile);
}

/**
 * Whether the given provider completes via a fill-in-the-middle (FIM)
 * endpoint rather than a chat endpoint. Currently only Codestral qualifies.
 */
export function isFimProvider(provider: ProviderID): boolean {
  switch (provider) {
    case 'mistral-codestral':
      return true;
    default:
      return false;
  }
}

export const providers: Provider[] = [
{
Expand Down
Loading
Loading