Skip to content

Commit

Permalink
Begin mistral testing
Browse files Browse the repository at this point in the history
  • Loading branch information
Corina Gum committed Mar 27, 2024
1 parent d66f89c commit a07c11f
Show file tree
Hide file tree
Showing 5 changed files with 33 additions and 11 deletions.
20 changes: 18 additions & 2 deletions js/packages/teams-ai/src/models/LlamaModel.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,13 @@ import { PromptCompletionModel, PromptResponse } from '../models';
import { Memory } from '../MemoryFork';
import { Message, PromptFunctions, PromptTemplate } from '../prompts';
import { Tokenizer } from '../tokenizers';
import { Colorize } from '../internals/Colorize';
import colorizeJson from 'json-colorizer';

/**
 * Options used to configure a `LlamaModel` prompt-completion client.
 */
export interface LlamaModelOptions {
    /** API key used to authenticate requests against the hosted model endpoint. */
    apiKey: string;
    /** URL of the model's completion endpoint that requests are POSTed to. */
    endpoint: string;
    /** When `true`, the chat prompt and the endpoint's response are logged to the console. */
    logRequests: boolean;
}

export class LlamaModel implements PromptCompletionModel {
Expand Down Expand Up @@ -44,18 +47,31 @@ export class LlamaModel implements PromptCompletionModel {
}

let last: Message | undefined = result.output[result.output.length - 1];
console.log(Colorize.warning(`Hello ${JSON.stringify(result.output[result.output.length - 1])}`));
if (last?.role !== 'user') {
last = undefined;
}
let res;

if (this.options.logRequests) {
console.log(Colorize.title('CHAT PROMPT:'));
console.log(Colorize.output(result.output));
}

console.log(`parameters: ${colorizeJson(JSON.stringify(template.config.completion))}`);
try {
res = await this._httpClient.post<{ output: string }>(this.options.endpoint, {
res = await this._httpClient.post(this.options.endpoint, {
input_data: {
input_string: result.output,
parameters: template.config.completion
}
});
if (this.options.logRequests) {
console.log(Colorize.title('CHAT RESPONSE:'));
console.log(Colorize.value('status', res.status));
console.log(Colorize.output(res.data[0]));
console.log(Colorize.title(`~~~~~~~~~`));
}
} catch (err) {
console.error(err);
throw err;
Expand All @@ -66,7 +82,7 @@ export class LlamaModel implements PromptCompletionModel {
input: last,
message: {
role: 'assistant',
content: res!.data.output
content: res!.data[0]
}
};
}
Expand Down
1 change: 1 addition & 0 deletions js/samples/04.ai.g.LLAMA/devTools/teamsapptester
10 changes: 6 additions & 4 deletions js/samples/04.ai.g.LLAMA/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -74,14 +74,15 @@ interface ConversationState {
}
type ApplicationTurnState = TurnState<ConversationState>;

if (!process.env.LLAMA_API_KEY && !process.env.LLAMA_ENDPOINT) {
if (!process.env.MISTRAL_API_KEY && !process.env.MISTRAL_ENDPOINT) {
throw new Error('Missing environment variables - please check that LLAMA_API_KEY and LLAMA_ENDPOINT are set.');
}
// Create AI components
const model = new LlamaModel({
// Llama Support
apiKey: process.env.LLAMA_API_KEY!,
endpoint: process.env.LLAMA_ENDPOINT!
apiKey: process.env.MISTRAL_API_KEY!,
endpoint: process.env.MISTRAL_ENDPOINT!,
logRequests: true
});

const prompts = new PromptManager({
Expand All @@ -99,7 +100,8 @@ const storage = new MemoryStorage();
const app = new Application<ApplicationTurnState>({
storage,
ai: {
planner
planner,
allow_looping: false
}
});

Expand Down
11 changes: 7 additions & 4 deletions js/samples/04.ai.g.LLAMA/src/prompts/default/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,14 @@
"description": "A bot that can turn the lights on and off",
"type": "completion",
"completion": {
"model": "llama-2-7b-chat-18",
"temperature": 0.2,
"model": "mistralai-Mistral-7B-Instruct-v01",
"temperature": 0.0,
"top_p": 0.9,
"do_sample": true,
"max_new_tokens": 500
"do_sample": false,
"response_format": {
"type": "json_object"
},
"return_full_text": false
},
"augmentation": {
"augmentation_type": "sequence"
Expand Down
2 changes: 1 addition & 1 deletion js/samples/04.ai.g.LLAMA/src/prompts/default/skprompt.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
The following is a conversation with an AI assistant.
The assistant can turn a light on or off.
The assistant can turn a light on or off. The assistant can only manage lights. The assistant can only discuss lights.

context:
The lights are currently {{getLightStatus}}.

0 comments on commit a07c11f

Please sign in to comment.