diff --git a/src/vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens.ts b/src/vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens.ts
index 8b74f73d90777..e75cfc1a0b6c3 100644
--- a/src/vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens.ts
+++ b/src/vs/workbench/contrib/codeEditor/browser/inspectEditorTokens/inspectEditorTokens.ts
@@ -430,7 +430,7 @@ class InspectEditorTokensWidget extends Disposable implements IContentWidget {
 		if (captures && captures.length > 0) {
 			dom.append(tbody, $('tr', undefined,
 				$('td.tiw-metadata-key', undefined, 'foreground'),
-				$('td.tiw-metadata-value', undefined, captures[0].name),
+				$('td.tiw-metadata-value', undefined, captures[captures.length - 1].name),
 			));
 		}
 	}
diff --git a/src/vs/workbench/services/treeSitter/browser/treeSitterTokenizationFeature.ts b/src/vs/workbench/services/treeSitter/browser/treeSitterTokenizationFeature.ts
index 44b591daf4a6d..392dc452393ff 100644
--- a/src/vs/workbench/services/treeSitter/browser/treeSitterTokenizationFeature.ts
+++ b/src/vs/workbench/services/treeSitter/browser/treeSitterTokenizationFeature.ts
@@ -160,6 +160,12 @@ class TreeSitterTokenizationSupport extends Disposable implements ITreeSitterTok
 		let tokenIndex = 0;
 		const lineStartOffset = textModel.getOffsetAt({ lineNumber: lineNumber, column: 1 });
 
+		const increaseSizeOfTokensByOneToken = () => {
+			const newTokens = new Uint32Array(tokens.length + 2);
+			newTokens.set(tokens);
+			tokens = newTokens;
+		};
+
 		for (let captureIndex = 0; captureIndex < captures.length; captureIndex++) {
 			const capture = captures[captureIndex];
 			const metadata = this.findMetadata(capture.name);
@@ -178,17 +184,36 @@ class TreeSitterTokenizationSupport extends Disposable implements ITreeSitterTok
 			}
 			const intermediateTokenOffset = lineRelativeOffset - currentTokenLength;
 			if (previousTokenEnd < intermediateTokenOffset) {
+				// Add an empty token to cover the space where there were no captures
 				tokens[tokenIndex * 2] = intermediateTokenOffset;
 				tokens[tokenIndex * 2 + 1] = 0;
 				tokenIndex++;
-				const newTokens = new Uint32Array(tokens.length + 2);
-				newTokens.set(tokens);
-				tokens = newTokens;
+
+				increaseSizeOfTokensByOneToken();
 			}
-			tokens[tokenIndex * 2] = lineRelativeOffset;
-			tokens[tokenIndex * 2 + 1] = metadata;
-			tokenIndex++;
+			const addCurrentTokenToArray = () => {
+				tokens[tokenIndex * 2] = lineRelativeOffset;
+				tokens[tokenIndex * 2 + 1] = metadata;
+				tokenIndex++;
+			};
+
+			if (previousTokenEnd > lineRelativeOffset) {
+				// The current token is within the previous token. Adjust the end of the previous token.
+				const originalPreviousTokenEndOffset = tokens[(tokenIndex - 1) * 2];
+				tokens[(tokenIndex - 1) * 2] = intermediateTokenOffset;
+
+				addCurrentTokenToArray();
+				// Add the rest of the previous token after the current token
+				increaseSizeOfTokensByOneToken();
+				tokens[tokenIndex * 2] = originalPreviousTokenEndOffset;
+				tokens[tokenIndex * 2 + 1] = tokens[(tokenIndex - 2) * 2 + 1];
+				tokenIndex++;
+
+			} else {
+				// Just add the token to the array
+				addCurrentTokenToArray();
+			}
 		}
 
 		if (captures[captures.length - 1].node.endPosition.column + 1 < lineLength) {
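
Taken together, the two changes handle nested tree-sitter captures: the tokenizer splits an enclosing token around a capture that falls inside it, and the token inspector reports the innermost (last) capture name instead of the outermost. As a rough standalone model of the splicing step, here is a minimal sketch; `spliceNestedToken` and its parameter names are hypothetical stand-ins for the patched code path, which operates on a flat `Uint32Array` of (end offset, metadata) pairs.

```ts
// Hypothetical standalone model of the nested-capture branch added above.
// Tokens are flat (endOffset, metadata) pairs, as in the patched tokenizer.
function spliceNestedToken(
	tokens: Uint32Array,
	tokenIndex: number,        // next free pair slot; slot tokenIndex - 1 holds the enclosing token
	nestedStartOffset: number, // where the nested capture begins (intermediateTokenOffset in the patch)
	nestedEndOffset: number,   // where the nested capture ends (lineRelativeOffset in the patch)
	nestedMetadata: number
): { tokens: Uint32Array; tokenIndex: number } {
	// Grow by one (endOffset, metadata) pair, like increaseSizeOfTokensByOneToken.
	const grown = new Uint32Array(tokens.length + 2);
	grown.set(tokens);
	tokens = grown;

	// The enclosing token now ends where the nested capture begins.
	const enclosingEndOffset = tokens[(tokenIndex - 1) * 2];
	const enclosingMetadata = tokens[(tokenIndex - 1) * 2 + 1];
	tokens[(tokenIndex - 1) * 2] = nestedStartOffset;

	// The nested capture becomes its own token.
	tokens[tokenIndex * 2] = nestedEndOffset;
	tokens[tokenIndex * 2 + 1] = nestedMetadata;
	tokenIndex++;

	// The remainder of the enclosing token is re-emitted after it,
	// reusing the enclosing token's metadata.
	tokens[tokenIndex * 2] = enclosingEndOffset;
	tokens[tokenIndex * 2 + 1] = enclosingMetadata;
	tokenIndex++;

	return { tokens, tokenIndex };
}

// Example: an enclosing token ending at offset 10, with a nested capture
// spanning offsets 4-7, becomes three tokens: [0,4) outer, [4,7) nested, [7,10) outer.
const tokens = Uint32Array.from([10, 1, 0, 0]); // one token plus one free slot
const result = spliceNestedToken(tokens, 1, 4, 7, 2);
console.log(Array.from(result.tokens)); // [4, 1, 7, 2, 10, 1]
```

Because the re-emitted tail reuses the enclosing token's metadata, the innermost capture effectively wins over exactly the range it covers; the inspectEditorTokens change mirrors this by showing `captures[captures.length - 1].name`, the innermost capture, as the foreground.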