Add Llama 2 prompt for Hugging Face. (#380)
This adds an `experimental_buildLlama2Prompt` method which converts `messages` into a Llama 2 prompt.
philschmid committed Jul 24, 2023
1 parent 4a8c5ef commit f50d9ef
Showing 4 changed files with 84 additions and 1 deletion.
5 changes: 5 additions & 0 deletions .changeset/cool-terms-allow.md
@@ -0,0 +1,5 @@
---
'ai': patch
---

Add experimental_buildLlama2Prompt helper for Hugging Face
14 changes: 14 additions & 0 deletions docs/pages/docs/api-reference/prompts.mdx
@@ -41,3 +41,17 @@ const response = Hf.textGenerationStream({
  inputs: experimental_buildStarChatBetaPrompt(messages)
})
```

## `experimental_buildLlama2Prompt`

Uses Llama 2 chat tokens (`[INST]`) to create a prompt; learn more in the [Hugging Face blog post on how to prompt Llama 2](https://huggingface.co/blog/llama2#how-to-prompt-llama-2). If a `Message` with an unsupported `role` is passed, an error is thrown.

```ts filename="route.ts" {6}
import { experimental_buildLlama2Prompt } from 'ai/prompts'

const { messages } = await req.json()
const response = Hf.textGenerationStream({
  model: 'meta-llama/Llama-2-7b-chat-hf',
  inputs: experimental_buildLlama2Prompt(messages)
})
```
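
For additional context, here is a minimal sketch of a complete route handler built around this helper. The Hugging Face client setup, the environment variable name, and the generation parameters are assumptions based on the SDK's other Hugging Face examples, not part of this commit:

```ts
import { HfInference } from '@huggingface/inference'
import { HuggingFaceStream, StreamingTextResponse } from 'ai'
import { experimental_buildLlama2Prompt } from 'ai/prompts'

// Hypothetical client setup; the token env var name is an assumption.
const Hf = new HfInference(process.env.HUGGINGFACE_API_KEY)

export async function POST(req: Request) {
  const { messages } = await req.json()

  // Flatten the chat messages into a single Llama 2 prompt string.
  const response = Hf.textGenerationStream({
    model: 'meta-llama/Llama-2-7b-chat-hf',
    inputs: experimental_buildLlama2Prompt(messages),
    parameters: {
      // Illustrative values only.
      max_new_tokens: 500,
      temperature: 0.7,
      return_full_text: false
    }
  })

  // Adapt the Hugging Face stream to a web ReadableStream and return it.
  const stream = HuggingFaceStream(response)
  return new StreamingTextResponse(stream)
}
```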
39 changes: 38 additions & 1 deletion packages/core/prompts/huggingface.test.ts
@@ -1,6 +1,7 @@
  import {
    experimental_buildOpenAssistantPrompt,
-   experimental_buildStarChatBetaPrompt
+   experimental_buildStarChatBetaPrompt,
+   experimental_buildLlama2Prompt
  } from './huggingface'
  import type { Message } from '../shared/types'

@@ -45,3 +46,39 @@ describe('buildOpenAssistantPrompt', () => {
expect(() => experimental_buildOpenAssistantPrompt(messages)).toThrow()
})
})

describe('buildLlamaPrompt', () => {
  it('should return a string with user instruction', () => {
    const messages: Pick<Message, 'content' | 'role'>[] = [
      { content: 'Hello, how are you?', role: 'user' }
    ]

    const expectedPrompt = '<s>[INST] Hello, how are you? [/INST]'
    const prompt = experimental_buildLlama2Prompt(messages)
    expect(prompt).toEqual(expectedPrompt)
  })

  it('should return a string with system, user and assistant messages', () => {
    const messages: Pick<Message, 'content' | 'role'>[] = [
      {
        content: 'You are helpful assistant, but you are drunk, hick',
        role: 'system'
      },
      { content: 'Hi there!', role: 'user' },
      { content: 'Sup, partner!', role: 'assistant' },
      { content: 'What are you doing?', role: 'user' }
    ]

    const expectedPrompt =
      '<s>[INST] <<SYS>>\nYou are helpful assistant, but you are drunk, hick\n<</SYS>>\n\nHi there! [/INST] Sup, partner!</s><s>[INST] What are you doing? [/INST]'
    const prompt = experimental_buildLlama2Prompt(messages)
    expect(prompt).toEqual(expectedPrompt)
  })

  it('should throw an error if a function message is included', () => {
    const messages: Pick<Message, 'content' | 'role'>[] = [
      { content: 'someFunction()', role: 'function' }
    ]
    expect(() => experimental_buildLlama2Prompt(messages)).toThrow()
  })
})
27 changes: 27 additions & 0 deletions packages/core/prompts/huggingface.ts
@@ -49,3 +49,30 @@ export function experimental_buildOpenAssistantPrompt(
      .join('') + '<|assistant|>'
  )
}

/**
 * A prompt constructor for HuggingFace Llama 2 chat models.
 * Does not support `function` messages.
 * @see https://huggingface.co/meta-llama/Llama-2-70b-chat-hf and https://huggingface.co/blog/llama2#how-to-prompt-llama-2
 */
export function experimental_buildLlama2Prompt(
  messages: Pick<Message, 'content' | 'role'>[]
) {
  const startPrompt = `<s>[INST] `
  const endPrompt = ` [/INST]`
  const conversation = messages.map(({ content, role }, index) => {
    if (role === 'user') {
      return content.trim()
    } else if (role === 'assistant') {
      return ` [/INST] ${content}</s><s>[INST] `
    } else if (role === 'function') {
      throw new Error('Llama 2 does not support function calls.')
    } else if (role === 'system' && index === 0) {
      return `<<SYS>>\n${content}\n<</SYS>>\n\n`
    } else {
      throw new Error(`Invalid message role: ${role}`)
    }
  })

  return startPrompt + conversation.join('') + endPrompt
}
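
As an illustration of the turn-by-turn assembly above (not part of the commit), a hypothetical multi-turn conversation would be flattened like this:

```ts
import { experimental_buildLlama2Prompt } from 'ai/prompts'

// Each assistant turn closes the current [INST] block and opens the next one,
// so the joined pieces line up with Llama 2's <s>[INST] ... [/INST] format.
const prompt = experimental_buildLlama2Prompt([
  { role: 'user', content: 'What is the capital of France?' },
  { role: 'assistant', content: 'The capital of France is Paris.' },
  { role: 'user', content: 'And of Italy?' }
])

// prompt ===
// '<s>[INST] What is the capital of France? [/INST] The capital of France is Paris.</s><s>[INST] And of Italy? [/INST]'
```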
