Cody completion: Mega bug bash thread #52365

Merged · 8 commits · May 24, 2023

Changes from all commits
2 changes: 2 additions & 0 deletions client/cody/CHANGELOG.md
@@ -12,6 +12,8 @@ All notable changes to Sourcegraph Cody will be documented in this file.

### Changed

- Various Cody completions related improvements [pull/52365](https://github.com/sourcegraph/sourcegraph/pull/52365)

## [0.1.4]

### Added
14 changes: 14 additions & 0 deletions client/cody/src/completions/cache.test.ts
@@ -25,4 +25,18 @@ describe('CompletionsCache', () => {
expect(cache.get('foo \n')).toEqual([{ prefix: 'foo \n', content: 'bar', messages: [] }])
expect(cache.get('foo ')).toEqual(undefined)
})

it('has a lookup function for untrimmed prefixes', () => {
const cache = new CompletionsCache()
cache.add([{ prefix: 'foo\n  ', content: 'baz', messages: [] }])

expect(cache.get('foo\n  ', false)).toEqual([
{
prefix: 'foo\n  ',
content: 'baz',
messages: [],
},
])
// A prefix with different trailing whitespace is not matched when trimming is disabled
expect(cache.get('foo\n ', false)).toEqual(undefined)
})
})
26 changes: 18 additions & 8 deletions client/cody/src/completions/cache.ts
@@ -11,8 +11,8 @@ export class CompletionsCache {
// account. We need to add additional information like file path or suffix
// to make sure the cache does not return undesired results for other files
// in the same project.
public get(prefix: string): Completion[] | undefined {
const trimmedPrefix = trimEndAfterLastNewline(prefix)
public get(prefix: string, trim: boolean = true): Completion[] | undefined {
const trimmedPrefix = trim ? trimEndAfterLastNewline(prefix) : prefix
const results = this.cache.get(trimmedPrefix)
if (results) {
return results.map(result => {
@@ -47,17 +47,27 @@
maxCharsAppended = completion.content.length
}

// We also cache the completion with the exact (= untrimmed) prefix
// for the separate lookup mode used for deletions
if (trimEndAfterLastNewline(completion.prefix) !== completion.prefix) {
this.insertCompletion(completion.prefix, completion)
}

for (let i = 0; i <= maxCharsAppended; i++) {
const key = trimEndAfterLastNewline(completion.prefix) + completion.content.slice(0, i)
if (!this.cache.has(key)) {
this.cache.set(key, [completion])
} else {
const existingCompletions = this.cache.get(key)!
existingCompletions.push(completion)
}
this.insertCompletion(key, completion)
}
}
}

private insertCompletion(key: string, completion: Completion): void {
if (!this.cache.has(key)) {
this.cache.set(key, [completion])
} else {
const existingCompletions = this.cache.get(key)!
existingCompletions.push(completion)
}
}
}

function trimEndAfterLastNewline(text: string): string {
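To make the caching behaviour concrete, here is a rough sketch of which keys end up in the cache for a single completion. This is illustrative only: the keys follow from the add/insertCompletion logic above, and it assumes trimEndAfterLastNewline strips the whitespace that follows the last newline, which is what its name and the regular lookup suggest.

import { CompletionsCache } from './cache'

const cache = new CompletionsCache()
// The prefix ends with indentation after the last newline, so it differs from its trimmed form
cache.add([{ prefix: 'if (x) {\n    ', content: 'foo()', messages: [] }])

// Keys now stored:
//   'if (x) {\n    '                  – the exact, untrimmed prefix (hit by get(prefix, false))
//   'if (x) {\n' … 'if (x) {\nfoo()'  – the trimmed prefix plus every partial slice of the
//                                       content, so the regular get(prefix) lookup keeps
//                                       matching while the user types the completion out
cache.get('if (x) {\n    ', false) // returns the completion
cache.get('if (x) {\nfo')          // also returns it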
80 changes: 57 additions & 23 deletions client/cody/src/completions/index.ts
@@ -1,9 +1,11 @@
import { LRUCache } from 'lru-cache'
import * as vscode from 'vscode'

import { Message } from '@sourcegraph/cody-shared/src/sourcegraph-api'
import { SourcegraphNodeCompletionsClient } from '@sourcegraph/cody-shared/src/sourcegraph-api/completions/nodeClient'

import { logEvent } from '../event-logger'
import { debug } from '../log'

import { CompletionsCache } from './cache'
import { getContext } from './context'
@@ -19,7 +21,6 @@ function lastNLines(text: string, n: number): string {
return lines.slice(Math.max(0, lines.length - n)).join('\n')
}

const estimatedLLMResponseLatencyMS = 700
const inlineCompletionsCache = new CompletionsCache()

export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
@@ -28,6 +29,9 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
private maxSuffixTokens: number
private abortOpenInlineCompletions: () => void = () => {}
private abortOpenMultilineCompletion: () => void = () => {}
private lastContentChanges: LRUCache<string, 'add' | 'del'> = new LRUCache<string, 'add' | 'del'>({
max: 10,
})

constructor(
private webviewErrorMessager: (error: string) => Promise<void>,
@@ -43,6 +47,13 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
this.promptTokens = this.contextWindowTokens - this.responseTokens
this.maxPrefixTokens = Math.floor(this.promptTokens * this.prefixPercentage)
this.maxSuffixTokens = Math.floor(this.promptTokens * this.suffixPercentage)

vscode.workspace.onDidChangeTextDocument(event => {
const document = event.document
const text = event.contentChanges[0].text

this.lastContentChanges.set(document.fileName, text.length > 0 ? 'add' : 'del')
})
}

public async provideInlineCompletionItems(
@@ -54,12 +65,11 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
try {
return await this.provideInlineCompletionItemsInner(document, position, context, token)
} catch (error) {
console.error(error)

if (error.message === 'aborted') {
return []
}

console.error(error)
debug('CodyCompletionProvider:inline:error', `${error.toString()}\n${error.stack}`)
return []
}
}
@@ -96,6 +106,21 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {

const { prefix, suffix, prevLine: precedingLine } = docContext

// Avoid showing completions when we're deleting code (Cody can only insert code at the
// moment)
const lastChange = this.lastContentChanges.get(document.fileName) ?? 'add'
if (lastChange === 'del') {
// When a line was deleted, only look up cached items and only include them if the
// untruncated prefix matches. This fixes some weird issues where the completion would
// render if you insert whitespace but not on the original place when you delete it
// again
const cachedCompletions = inlineCompletionsCache.get(prefix, false)
if (cachedCompletions) {
return cachedCompletions.map(toInlineCompletionItem)
}
return []
}
Comment on lines +109 to +122 — philipp-spiess (Contributor, Author):
New heuristics: Avoid showing completions when we're deleting code

There is only one exception, and that is when we have a cached completion at exactly the same prompt. This is necessary since you could be inserting spaces and then pressing backspace to get back to where you were before (exactly when you triggered the completion), and we currently show the same completion while you insert more whitespace but would then hide it when you delete your way back.

Here's a video of the new behavior:

Screen.Recording.2023-05-24.at.14.12.59.mov

Notice there is a bug right now because we add a cache entry for the completion with a trimmed prefix and that is now also matching in this logic. You can see this at the end of the video where removing all indentation from the line reveals the suggestion again. We need some extra bookkeeping to fix this so I decided it's not worth doing it just yet. WDYT?

Todo for myself: Add a todo about that in code
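To sketch that bug with hypothetical values (it follows from the cache behaviour in this PR, where a completion is also stored under its trimmed prefix):

// Completion originally triggered on an indented, empty line
inlineCompletionsCache.add([{ prefix: 'if (x) {\n    ', content: 'foo()', messages: [] }])

// Deleting all of the indentation leaves exactly the trimmed prefix…
const prefixAfterDeleting = 'if (x) {\n'

// …and the trimmed prefix is itself a cache key, so even the "exact, untrimmed prefix only"
// lookup used for deletions finds it and the suggestion reappears:
inlineCompletionsCache.get(prefixAfterDeleting, false) // returns the completion instead of undefined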


const cachedCompletions = inlineCompletionsCache.get(prefix)
if (cachedCompletions) {
return cachedCompletions.map(toInlineCompletionItem)
@@ -125,7 +150,7 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
contextChars
)

let waitMs: number
let timeout: number
const completers: CompletionProvider[] = []

// VS Code does not show completions if we are in the process of writing a word or if a
@@ -138,17 +163,16 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {

// TODO(philipp-spiess): Add a better detection for start-of-block and don't require C like
// languages.
// TODO(philipp-spiess): Temporary delete multi-line inline completions while I fix a bug
const multilineEnabledLanguage = false
// document.languageId === 'typescript' || document.languageId === 'javascript' || document.languageId === 'go'
const multilineEnabledLanguage =
document.languageId === 'typescript' || document.languageId === 'javascript' || document.languageId === 'go'
if (
multilineEnabledLanguage &&
// Only trigger multiline inline suggestions for empty lines
precedingLine.trim() === '' &&
// Only trigger multiline inline suggestions for the beginning of blocks
prefix.trim().at(prefix.trim().length - 1) === '{'
) {
waitMs = 500
timeout = 500
completers.push(
new EndOfLineCompletionProvider(
this.completionsClient,
@@ -159,12 +183,12 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
suffix,
'',
3,
true // multiline
'block' // multiline
)
)
} else if (precedingLine.trim() === '') {
// Start of line: medium debounce
waitMs = 500
timeout = 200
completers.push(
new EndOfLineCompletionProvider(
this.completionsClient,
@@ -182,7 +206,7 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
return []
} else {
// End of line: long debounce, complete until newline
waitMs = 1000
timeout = 500
completers.push(
new EndOfLineCompletionProvider(
this.completionsClient,
@@ -209,11 +233,7 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemProvider {
)
}

// TODO(beyang): trigger on context quality (better context means longer completion)

await new Promise<void>(resolve =>
setTimeout(() => resolve(), Math.max(0, waitMs - estimatedLLMResponseLatencyMS))
)
Comment on lines -214 to -216 — philipp-spiess (Contributor, Author), May 24, 2023:
Bug fix: The previous heuristic would take waitMs and subtract the estimated LLM response latency. I looked at prod data for dotcom and the estimate was overshooting, but I also noticed that waitMs was set to 500 before and the latency to 700, so we would not debounce at all in some situations 😬

I've tweaked the timeouts again. It will unfortunately be a bit slower, but in most cases it's not noticeable. Start-of-line completions are now properly debounced by 200ms though!
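As a concrete example of why the old calculation broke down (numbers taken from the removed constant and the old start-of-line branch in this diff):

const waitMs = 500                        // old start-of-line debounce
const estimatedLLMResponseLatencyMS = 700 // removed constant

// Old delay: the estimated latency was subtracted from the wait time
Math.max(0, waitMs - estimatedLLMResponseLatencyMS) // === 0, so the request fired immediately

// New delay: the timeout is awaited as-is – 500ms for multiline/end-of-line, 200ms for start-of-line
// await new Promise<void>(resolve => setTimeout(resolve, timeout))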

await new Promise<void>(resolve => setTimeout(resolve, timeout))

// We don't need to make a request at all if the signal is already aborted after the
// debounce
Expand All @@ -222,14 +242,19 @@ export class CodyCompletionItemProvider implements vscode.InlineCompletionItemPr
}

logEvent('CodyVSCodeExtension:completion:started', LOG_INLINE, LOG_INLINE)
const start = Date.now()

const results = (await Promise.all(completers.map(c => c.generateCompletions(abortController.signal)))).flat()

inlineCompletionsCache.add(results)

logEvent('CodyVSCodeExtension:completion:suggested', LOG_INLINE, LOG_INLINE)
const results = rankCompletions(
(await Promise.all(completers.map(c => c.generateCompletions(abortController.signal)))).flat()
)

return results.map(toInlineCompletionItem)
if (hasVisibleCompletions(results)) {
const params = { ...LOG_INLINE, latency: Date.now() - start, timeout }
logEvent('CodyVSCodeExtension:completion:suggested', params, params)
inlineCompletionsCache.add(results)
return results.map(toInlineCompletionItem)
Comment on lines +251 to +255 — philipp-spiess (Contributor, Author):
Bug fix: We were recording completions as being suggested even though they might all be empty.

I've also added some logging to get a better understanding of latency and timeouts used.
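For illustration, with hypothetical inputs the new hasVisibleCompletions guard (defined at the bottom of this file) behaves like this:

hasVisibleCompletions([])                                                  // false – nothing came back
hasVisibleCompletions([{ prefix: 'foo', content: '  \n', messages: [] }])  // false – whitespace only
hasVisibleCompletions([{ prefix: 'foo', content: 'bar()', messages: [] }]) // true

Only in the last case is the suggestion logged and added to the cache.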

}
return []
}

public async fetchAndShowCompletions(): Promise<void> {
@@ -410,3 +435,12 @@ function toInlineCompletionItem(completion: Completion): vscode.InlineCompletionItem {
command: 'cody.completions.inline.accepted',
})
}

function rankCompletions(completions: Completion[]): Completion[] {
// TODO(philipp-spiess): Improve ranking to something more complex than just length
return completions.sort((a, b) => b.content.split('\n').length - a.content.split('\n').length)
}
Comment on lines +439 to +442 — philipp-spiess (Contributor, Author):
New heuristics: Added ranking for completions.

It's very stupid right now and just prefers whatever has the most lines of code. Lol.

Got some more ideas lined up to try out, though!
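For example (hypothetical completions; the sort key is simply the number of lines in content):

rankCompletions([
    { prefix: 'foo', content: 'bar()', messages: [] },
    { prefix: 'foo', content: 'if (x) {\n    bar()\n}', messages: [] },
])
// => the three-line block completion is ranked first, the single-line one last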


function hasVisibleCompletions(completions: Completion[]): boolean {
return completions.length > 0 && !!completions.find(c => c.content.trim() !== '')
}