3307: Semantic Ranges r=matklad a=kjeremy



Co-authored-by: Jeremy Kolb <kjeremy@gmail.com>
Co-authored-by: kjeremy <kjeremy@gmail.com>
bors[bot] and kjeremy committed Feb 25, 2020
2 parents d3040c0 + fa355d6 commit ae0aeb1
Showing 6 changed files with 131 additions and 41 deletions.
9 changes: 7 additions & 2 deletions crates/ra_ide/src/lib.rs
@@ -425,9 +425,14 @@ impl Analysis {
self.with_db(|db| runnables::runnables(db, file_id))
}

/// Computes syntax highlighting for the given file.
/// Computes syntax highlighting for the given file
pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
self.with_db(|db| syntax_highlighting::highlight(db, file_id))
self.with_db(|db| syntax_highlighting::highlight(db, file_id, None))
}

/// Computes syntax highlighting for the given file range.
pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
self.with_db(|db| syntax_highlighting::highlight(db, frange.file_id, Some(frange.range)))
}

/// Computes syntax highlighting for the given file.
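As a usage sketch (not part of the commit): the new highlight_range entry point mirrors highlight but restricts the result to a FileRange. The helper below is illustrative; TextRange is imported from ra_syntax since ra_ide may not re-export it, and the prefix length is arbitrary.

use ra_ide::{Analysis, FileId, FileRange, HighlightedRange};
use ra_syntax::TextRange;

// Hypothetical helper: highlight only the first `len` bytes of a file.
// Like `highlight`, `highlight_range` returns Cancelable<Vec<HighlightedRange>>.
fn highlight_prefix(analysis: &Analysis, file_id: FileId, len: u32) -> Vec<HighlightedRange> {
    let frange = FileRange { file_id, range: TextRange::offset_len(0.into(), len.into()) };
    analysis.highlight_range(frange).unwrap_or_default()
}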
111 changes: 85 additions & 26 deletions crates/ra_ide/src/syntax_highlighting.rs
@@ -5,8 +5,8 @@ use ra_db::SourceDatabase;
use ra_ide_db::{defs::NameDefinition, RootDatabase};
use ra_prof::profile;
use ra_syntax::{
ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange,
WalkEvent, T,
ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
TextRange, WalkEvent, T,
};
use rustc_hash::FxHashMap;

@@ -67,8 +67,13 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
}
}

pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
pub(crate) fn highlight(
db: &RootDatabase,
file_id: FileId,
range: Option<TextRange>,
) -> Vec<HighlightedRange> {
let _p = profile("highlight");

let parse = db.parse(file_id);
let root = parse.tree().syntax().clone();

@@ -79,22 +84,56 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa

let mut in_macro_call = None;

// Determine the root based on the given range.
let (root, highlight_range) = if let Some(range) = range {
let root = match root.covering_element(range) {
NodeOrToken::Node(node) => node,
NodeOrToken::Token(token) => token.parent(),
};
(root, range)
} else {
(root.clone(), root.text_range())
};

for event in root.preorder_with_tokens() {
match event {
WalkEvent::Enter(node) => match node.kind() {
MACRO_CALL => {
in_macro_call = Some(node.clone());
if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None });
}
WalkEvent::Enter(node) => {
if node.text_range().intersection(&highlight_range).is_none() {
continue;
}
_ if in_macro_call.is_some() => {
if let Some(token) = node.as_token() {
if let Some((tag, binding_hash)) = highlight_token_tree(

match node.kind() {
MACRO_CALL => {
in_macro_call = Some(node.clone());
if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
res.push(HighlightedRange {
range,
tag: tags::MACRO,
binding_hash: None,
});
}
}
_ if in_macro_call.is_some() => {
if let Some(token) = node.as_token() {
if let Some((tag, binding_hash)) = highlight_token_tree(
&mut sb,
&analyzer,
&mut bindings_shadow_count,
InFile::new(file_id.into(), token.clone()),
) {
res.push(HighlightedRange {
range: node.text_range(),
tag,
binding_hash,
});
}
}
}
_ => {
if let Some((tag, binding_hash)) = highlight_node(
&mut sb,
&analyzer,
&mut bindings_shadow_count,
InFile::new(file_id.into(), token.clone()),
InFile::new(file_id.into(), node.clone()),
) {
res.push(HighlightedRange {
range: node.text_range(),
@@ -104,17 +143,12 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
}
}
}
_ => {
if let Some((tag, binding_hash)) = highlight_node(
&mut sb,
&mut bindings_shadow_count,
InFile::new(file_id.into(), node.clone()),
) {
res.push(HighlightedRange { range: node.text_range(), tag, binding_hash });
}
}
},
}
WalkEvent::Leave(node) => {
if node.text_range().intersection(&highlight_range).is_none() {
continue;
}

if let Some(m) = in_macro_call.as_ref() {
if *m == node {
in_macro_call = None;
@@ -265,7 +299,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
)
}

let mut ranges = highlight(db, file_id);
let mut ranges = highlight(db, file_id, None);
ranges.sort_by_key(|it| it.range.start());
// quick non-optimal heuristic to intersect token ranges and highlighted ranges
let mut frontier = 0;
@@ -374,7 +408,10 @@ mod tests {

use test_utils::{assert_eq_text, project_dir, read_text};

use crate::mock_analysis::{single_file, MockAnalysis};
use crate::{
mock_analysis::{single_file, MockAnalysis},
FileRange, TextRange,
};

#[test]
fn test_highlighting() {
@@ -475,4 +512,26 @@ fn bar() {
let _ = host.analysis().highlight(file_id).unwrap();
// eprintln!("elapsed: {:?}", t.elapsed());
}

#[test]
fn test_ranges() {
let (analysis, file_id) = single_file(
r#"
#[derive(Clone, Debug)]
struct Foo {
pub x: i32,
pub y: i32,
}"#,
);

// The "x"
let highlights = &analysis
.highlight_range(FileRange {
file_id,
range: TextRange::offset_len(82.into(), 1.into()),
})
.unwrap();

assert_eq!(highlights[0].tag, "field");
}
}
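For orientation, a condensed sketch of the range-narrowing strategy added to highlight above: start the traversal at the element covering the requested range instead of the file root, and skip elements that do not intersect the range. The function is illustrative; the rowan-style calls (covering_element, preorder_with_tokens, intersection) are the ones used in the hunk.

use ra_syntax::{NodeOrToken, SyntaxNode, TextRange, WalkEvent};

// Collect the syntax nodes that intersect `range`, walking only the
// subtree under the smallest element covering that range.
fn nodes_in_range(file_root: &SyntaxNode, range: TextRange) -> Vec<SyntaxNode> {
    let root = match file_root.covering_element(range) {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };

    let mut result = Vec::new();
    for event in root.preorder_with_tokens() {
        if let WalkEvent::Enter(element) = event {
            // Ignore anything that lies entirely outside the requested range.
            if element.text_range().intersection(&range).is_none() {
                continue;
            }
            if let NodeOrToken::Node(node) = element {
                result.push(node);
            }
        }
    }
    result
}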
16 changes: 9 additions & 7 deletions crates/rust-analyzer/src/caps.rs
@@ -7,9 +7,9 @@ use lsp_types::{
CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
WorkDoneProgressOptions,
};

pub fn server_capabilities() -> ServerCapabilities {
@@ -60,7 +60,7 @@ pub fn server_capabilities() -> ServerCapabilities {
execute_command_provider: None,
workspace: None,
call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
semantic_tokens_provider: Some(
SemanticTokensOptions {
legend: SemanticTokensLegend {
token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
Expand All @@ -71,9 +71,11 @@ pub fn server_capabilities() -> ServerCapabilities {
},

document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
..SemanticTokensOptions::default()
},
)),
range_provider: Some(true),
work_done_progress_options: Default::default(),
}
.into(),
),
experimental: Default::default(),
}
}
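Note on the .into() above: it performs the conversion that the removed explicit wrapping spelled out, turning the SemanticTokensOptions (now with range_provider: Some(true)) into the enum the semantic_tokens_provider field expects. A minimal equivalent, for illustration:

use lsp_types::{SemanticTokensOptions, SemanticTokensServerCapabilities};

// Equivalent to the `.into()` call: wrap the options in the capability enum,
// as the pre-change code did explicitly.
fn to_capability(opts: SemanticTokensOptions) -> SemanticTokensServerCapabilities {
    SemanticTokensServerCapabilities::SemanticTokensOptions(opts)
}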
3 changes: 2 additions & 1 deletion crates/rust-analyzer/src/main_loop.rs
@@ -527,8 +527,9 @@ fn on_request(
.on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
.on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
.on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
.on::<req::Ssr>(handlers::handle_ssr)?
.on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
.on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
.on::<req::Ssr>(handlers::handle_ssr)?
.finish();
Ok(())
}
26 changes: 24 additions & 2 deletions crates/rust-analyzer/src/main_loop/handlers.rs
@@ -17,8 +17,8 @@ use lsp_types::{
Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
TextDocumentIdentifier, TextEdit, WorkspaceEdit,
SemanticTokens, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit,
};
use ra_ide::{
AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -1092,3 +1092,25 @@ pub fn handle_semantic_tokens(

Ok(Some(tokens.into()))
}

pub fn handle_semantic_tokens_range(
world: WorldSnapshot,
params: SemanticTokensRangeParams,
) -> Result<Option<SemanticTokensRangeResult>> {
let _p = profile("handle_semantic_tokens_range");

let frange = (&params.text_document, params.range).try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(frange.file_id)?;

let mut builder = SemanticTokensBuilder::default();

for h in world.analysis().highlight_range(frange)?.into_iter() {
let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
let (token_type, token_modifiers) = type_and_modifiers.conv();
builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
}

let tokens = SemanticTokens { data: builder.build(), ..Default::default() };

Ok(Some(tokens.into()))
}
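For context, handle_semantic_tokens_range is reached via the textDocument/semanticTokens/range request registered in the main_loop.rs and req.rs hunks. A hedged sketch of the parameters a client would send, built with lsp_types; the URI, positions, and exact field set are assumptions about the lsp_types version in use.

use lsp_types::{
    PartialResultParams, Position, Range, SemanticTokensRangeParams, TextDocumentIdentifier,
    Url, WorkDoneProgressParams,
};

// Hypothetical request params: semantic tokens for lines 10..20 of a file.
fn example_range_params() -> SemanticTokensRangeParams {
    SemanticTokensRangeParams {
        text_document: TextDocumentIdentifier {
            uri: Url::parse("file:///tmp/example.rs").unwrap(),
        },
        range: Range::new(Position::new(10, 0), Position::new(20, 0)),
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
    }
}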
7 changes: 4 additions & 3 deletions crates/rust-analyzer/src/req.rs
@@ -12,9 +12,10 @@ pub use lsp_types::{
DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit,
WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
};

pub enum AnalyzerStatus {}
