Skip to content

Commit

Permalink
[WIP]: Support multiple files in text-document completions and code …
Browse files Browse the repository at this point in the history
…actions
  • Loading branch information
SevInf committed May 10, 2024
1 parent b046ee1 commit 5bc901e
Show file tree
Hide file tree
Showing 12 changed files with 93 additions and 45 deletions.
33 changes: 19 additions & 14 deletions prisma-fmt/src/code_actions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ mod relations;

use lsp_types::{CodeActionOrCommand, CodeActionParams, Diagnostic, Range, TextEdit, WorkspaceEdit};
use psl::{
diagnostics::Span,
diagnostics::{FileId, Span},
parser_database::{
ast,
walkers::{ModelWalker, RefinedRelationWalker, ScalarFieldWalker},
Expand All @@ -14,24 +14,28 @@ use psl::{
schema_ast::ast::{Attribute, IndentationType, NewlineType, WithSpan},
PreviewFeature,
};
use std::{collections::HashMap, sync::Arc};
use std::collections::HashMap;

pub(crate) fn empty_code_actions() -> Vec<CodeActionOrCommand> {
Vec::new()
}

pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec<CodeActionOrCommand> {
pub(crate) fn available_actions(
schema_files: Vec<(String, SourceFile)>,
initiating_file_id: u32,
params: CodeActionParams,
) -> Vec<CodeActionOrCommand> {
let mut actions = Vec::new();

let file = SourceFile::new_allocated(Arc::from(schema.into_boxed_str()));

let validated_schema = psl::validate(file);
let validated_schema = psl::validate_multi_file(schema_files);

let config = &validated_schema.configuration;

let datasource = config.datasources.first();
let initiating_file_id = FileId(initiating_file_id);

for source in validated_schema.db.ast_assert_single().sources() {
let initiating_ast = validated_schema.db.ast(initiating_file_id);
for source in initiating_ast.sources() {
relation_mode::edit_referential_integrity(
&mut actions,
&params,
Expand All @@ -43,22 +47,22 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec
// models AND views
for model in validated_schema
.db
.walk_models()
.chain(validated_schema.db.walk_views())
.walk_models_in_file(initiating_file_id)
.chain(validated_schema.db.walk_views_in_file(initiating_file_id))
{
if config.preview_features().contains(PreviewFeature::MultiSchema) {
multi_schema::add_schema_block_attribute_model(
&mut actions,
&params,
validated_schema.db.source_assert_single(),
validated_schema.db.source(initiating_file_id),
config,
model,
);

multi_schema::add_schema_to_schemas(
&mut actions,
&params,
validated_schema.db.source_assert_single(),
validated_schema.db.source(initiating_file_id),
config,
model,
);
Expand All @@ -70,25 +74,26 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec
mongodb::add_native_for_auto_id(
&mut actions,
&params,
validated_schema.db.source_assert_single(),
validated_schema.db.source(initiating_file_id),
model,
datasource.unwrap(),
);
}
}

for enumerator in validated_schema.db.walk_enums() {
for enumerator in validated_schema.db.walk_enums_in_file(initiating_file_id) {
if config.preview_features().contains(PreviewFeature::MultiSchema) {
multi_schema::add_schema_block_attribute_enum(
&mut actions,
&params,
validated_schema.db.source_assert_single(),
validated_schema.db.source(initiating_file_id),
config,
enumerator,
)
}
}

// TODO: split actions by referencing/referenced side
for relation in validated_schema.db.walk_relations() {
if let RefinedRelationWalker::Inline(relation) = relation.refine() {
let complete_relation = match relation.as_complete() {
Expand Down
4 changes: 2 additions & 2 deletions prisma-fmt/src/get_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ pub(crate) fn get_config(params: &str) -> Result<String, String> {
}

fn get_config_impl(params: GetConfigParams) -> Result<serde_json::Value, GetConfigError> {
let (files, mut config) =
psl::parse_configuration_multi_file(params.prisma_schema.into()).map_err(create_get_config_error)?;
let prisma_schema: Vec<_> = params.prisma_schema.into();
let (files, mut config) = psl::parse_configuration_multi_file(&prisma_schema).map_err(create_get_config_error)?;

if !params.ignore_env_var_errors {
let overrides: Vec<(_, _)> = params.datasource_overrides.into_iter().collect();
Expand Down
9 changes: 7 additions & 2 deletions prisma-fmt/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,20 @@ use schema_file_input::SchemaFileInput;
/// request](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#textDocument_completion).
/// Input and output are both JSON, the request being a `CompletionParams` object and the response
/// being a `CompletionList` object.
pub fn text_document_completion(schema: String, params: &str) -> String {
pub fn text_document_completion(schema_files: String, initiating_file_id: u32, params: &str) -> String {
let params = if let Ok(params) = serde_json::from_str::<lsp_types::CompletionParams>(params) {
params
} else {
warn!("Failed to parse params to text_document_completion() as CompletionParams.");
return serde_json::to_string(&text_document_completion::empty_completion_list()).unwrap();
};

let completion_list = text_document_completion::completion(schema, params);
let Ok(input) = serde_json::from_str::<SchemaFileInput>(&schema_files) else {
warn!("Failed to parse schema file input");
return serde_json::to_string(&text_document_completion::empty_completion_list()).unwrap();
};

let completion_list = text_document_completion::completion(input.into(), initiating_file_id, params);

serde_json::to_string(&completion_list).unwrap()
}
Expand Down
44 changes: 28 additions & 16 deletions prisma-fmt/src/text_document_completion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,11 @@ use log::*;
use lsp_types::*;
use psl::{
datamodel_connector::Connector,
diagnostics::Span,
parse_configuration,
diagnostics::{FileId, Span},
parse_configuration_multi_file,
parser_database::{ast, ParserDatabase, SourceFile},
Configuration, Datasource, Diagnostics, Generator, PreviewFeature,
};
use std::sync::Arc;

use crate::position_to_offset;

Expand All @@ -21,18 +20,29 @@ pub(crate) fn empty_completion_list() -> CompletionList {
}
}

pub(crate) fn completion(schema: String, params: CompletionParams) -> CompletionList {
let source_file = SourceFile::new_allocated(Arc::from(schema.into_boxed_str()));
pub(crate) fn completion(
schema_files: Vec<(String, SourceFile)>,
initiating_doc_id: u32,
params: CompletionParams,
) -> CompletionList {
let doc = schema_files.get(initiating_doc_id as usize).map(|(_, doc)| doc);

let position =
if let Some(pos) = super::position_to_offset(&params.text_document_position.position, source_file.as_str()) {
pos
} else {
warn!("Received a position outside of the document boundaries in CompletionParams");
return empty_completion_list();
};
let Some(initiating_doc) = doc else {
warn!("Received invalid index of initiating doc");
return empty_completion_list();
};
let position = if let Some(pos) =
super::position_to_offset(&params.text_document_position.position, initiating_doc.as_str())
{
pos
} else {
warn!("Received a position outside of the document boundaries in CompletionParams");
return empty_completion_list();
};

let config = parse_configuration(source_file.as_str()).ok();
let config = parse_configuration_multi_file(&schema_files)
.ok()
.map(|(_, config)| config);

let mut list = CompletionList {
is_incomplete: false,
Expand All @@ -41,14 +51,15 @@ pub(crate) fn completion(schema: String, params: CompletionParams) -> Completion

let db = {
let mut diag = Diagnostics::new();
ParserDatabase::new_single_file(source_file, &mut diag)
ParserDatabase::new(&schema_files, &mut diag)
};

let ctx = CompletionContext {
config: config.as_ref(),
params: &params,
db: &db,
position,
initiating_file_id: FileId(initiating_doc_id),
};

push_ast_completions(ctx, &mut list);
Expand All @@ -62,6 +73,7 @@ struct CompletionContext<'a> {
params: &'a CompletionParams,
db: &'a ParserDatabase,
position: usize,
initiating_file_id: FileId,
}

impl<'a> CompletionContext<'a> {
Expand Down Expand Up @@ -96,7 +108,7 @@ fn push_ast_completions(ctx: CompletionContext<'_>, completion_list: &mut Comple
_ => ctx.connector().default_relation_mode(),
};

match ctx.db.ast_assert_single().find_at_position(ctx.position) {
match ctx.db.ast(ctx.initiating_file_id).find_at_position(ctx.position) {
ast::SchemaPosition::Model(
_model_id,
ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))),
Expand Down Expand Up @@ -195,7 +207,7 @@ fn ds_has_prop(ctx: CompletionContext<'_>, prop: &str) -> bool {

fn push_namespaces(ctx: CompletionContext<'_>, completion_list: &mut CompletionList) {
for (namespace, _) in ctx.namespaces() {
let insert_text = if add_quotes(ctx.params, ctx.db.source_assert_single()) {
let insert_text = if add_quotes(ctx.params, ctx.db.source(ctx.initiating_file_id)) {
format!(r#""{namespace}""#)
} else {
namespace.to_string()
Expand Down
2 changes: 1 addition & 1 deletion prisma-fmt/src/text_document_completion/datasource.rs
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ pub(super) fn url_env_db_completion(completion_list: &mut CompletionList, kind:
_ => unreachable!(),
};

let insert_text = if add_quotes(ctx.params, ctx.db.source_assert_single()) {
let insert_text = if add_quotes(ctx.params, ctx.db.source(ctx.initiating_file_id)) {
format!(r#""{text}""#)
} else {
text.to_owned()
Expand Down
3 changes: 2 additions & 1 deletion prisma-fmt/tests/text_document_completion/test_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ pub(crate) fn test_scenario(scenario_name: &str) {
context: None,
};

let result = prisma_fmt::text_document_completion(schema, &serde_json::to_string_pretty(&params).unwrap());
let schema_files = serde_json::to_string_pretty(&[("schema.prisma", schema)]).unwrap();
let result = prisma_fmt::text_document_completion(schema_files, 0, &serde_json::to_string_pretty(&params).unwrap());
// Prettify the JSON
let result =
serde_json::to_string_pretty(&serde_json::from_str::<lsp_types::CompletionList>(&result).unwrap()).unwrap();
Expand Down
4 changes: 2 additions & 2 deletions prisma-schema-wasm/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -87,9 +87,9 @@ pub fn preview_features() -> String {
/// Input and output are both JSON, the request being a `CompletionParams` object and the response
/// being a `CompletionList` object.
#[wasm_bindgen]
pub fn text_document_completion(schema: String, params: String) -> String {
pub fn text_document_completion(schema_files: String, initiating_file_id: u32, params: String) -> String {
register_panic_hook();
prisma_fmt::text_document_completion(schema, &params)
prisma_fmt::text_document_completion(schema_files, initiating_file_id, &params)
}

/// This API is modelled on an LSP [code action
Expand Down
4 changes: 2 additions & 2 deletions psl/parser-database/src/files.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@ pub struct Files(pub Vec<(String, schema_ast::SourceFile, ast::SchemaAst)>);

impl Files {
/// Create a new Files instance from multiple files.
pub fn new(files: Vec<(String, schema_ast::SourceFile)>, diagnostics: &mut Diagnostics) -> Self {
pub fn new(files: &[(String, schema_ast::SourceFile)], diagnostics: &mut Diagnostics) -> Self {
let asts = files
.into_iter()

Check failure on line 16 in psl/parser-database/src/files.rs

View workflow job for this annotation

GitHub Actions / clippy linting

this `.into_iter()` call is equivalent to `.iter()` and will not consume the `slice`
.enumerate()
.map(|(file_idx, (path, source))| {
let id = FileId(file_idx as u32);
let ast = schema_ast::parse_schema(source.as_str(), diagnostics, id);
(path, source, ast)
(path.to_owned(), source.clone(), ast)
})
.collect();
Self(asts)
Expand Down
12 changes: 9 additions & 3 deletions psl/parser-database/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ pub use ids::*;
pub use names::is_reserved_type_name;
use names::Names;
pub use relations::{ManyToManyRelationId, ReferentialAction, RelationId};
use schema_ast::ast::SourceConfig;
pub use schema_ast::{ast, SourceFile};
pub use types::{
IndexAlgorithm, IndexFieldPath, IndexType, OperatorClass, RelationFieldId, ScalarFieldId, ScalarFieldType,
Expand Down Expand Up @@ -83,12 +84,12 @@ pub struct ParserDatabase {
impl ParserDatabase {
/// See the docs on [ParserDatabase](/struct.ParserDatabase.html).
pub fn new_single_file(file: SourceFile, diagnostics: &mut Diagnostics) -> Self {
Self::new(vec![("schema.prisma".to_owned(), file)], diagnostics)
Self::new(&[("schema.prisma".to_owned(), file)], diagnostics)
}

/// See the docs on [ParserDatabase](/struct.ParserDatabase.html).
pub fn new(schemas: Vec<(String, schema_ast::SourceFile)>, diagnostics: &mut Diagnostics) -> Self {
let asts = Files::new(schemas, diagnostics);
pub fn new(schemas: &[(String, schema_ast::SourceFile)], diagnostics: &mut Diagnostics) -> Self {
let asts = Files::new(&schemas, diagnostics);

Check failure on line 92 in psl/parser-database/src/lib.rs

View workflow job for this annotation

GitHub Actions / clippy linting

this expression creates a reference which is immediately dereferenced by the compiler

let mut interner = Default::default();
let mut names = Default::default();
Expand Down Expand Up @@ -219,6 +220,11 @@ impl ParserDatabase {
pub fn file_name(&self, file_id: FileId) -> &str {
self.asts[file_id].0.as_str()
}

/// Iterate all datasources defined in the schema
pub fn datasources(&self) -> impl Iterator<Item = &SourceConfig> {
self.iter_asts().flat_map(|ast| ast.sources())
}
}

impl std::ops::Index<FileId> for ParserDatabase {
Expand Down
19 changes: 19 additions & 0 deletions psl/parser-database/src/walkers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ pub use r#enum::*;
pub use relation::*;
pub use relation_field::*;
pub use scalar_field::*;
use schema_ast::ast::WithSpan;

use crate::{ast, FileId};

Expand Down Expand Up @@ -90,6 +91,12 @@ impl crate::ParserDatabase {
.map(move |enum_id| self.walk(enum_id))
}

/// walk all enums in specified file
pub fn walk_enums_in_file(&self, file_id: FileId) -> impl Iterator<Item = EnumWalker<'_>> {
self.walk_enums()
.filter(move |walker| walker.ast_enum().span().file_id == file_id)
}

/// Walk all the models in the schema.
pub fn walk_models(&self) -> impl Iterator<Item = ModelWalker<'_>> + '_ {
self.iter_tops()
Expand All @@ -98,6 +105,12 @@ impl crate::ParserDatabase {
.filter(|m| !m.ast_model().is_view())
}

/// walk all models in specified file
pub fn walk_models_in_file(&self, file_id: FileId) -> impl Iterator<Item = ModelWalker<'_>> {
self.walk_models()
.filter(move |walker| walker.ast_model().span().file_id == file_id)
}

/// Walk all the views in the schema.
pub fn walk_views(&self) -> impl Iterator<Item = ModelWalker<'_>> + '_ {
self.iter_tops()
Expand All @@ -106,6 +119,12 @@ impl crate::ParserDatabase {
.filter(|m| m.ast_model().is_view())
}

/// walk all views in specified file
pub fn walk_views_in_file(&self, file_id: FileId) -> impl Iterator<Item = ModelWalker<'_>> {
self.walk_views()
.filter(move |walker| walker.ast_model().span().file_id == file_id)
}

/// Walk all the composite types in the schema.
pub fn walk_composite_types(&self) -> impl Iterator<Item = CompositeTypeWalker<'_>> + '_ {
self.iter_tops()
Expand Down
2 changes: 1 addition & 1 deletion psl/psl-core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ pub fn parse_configuration(
}

pub fn parse_configuration_multi_file(
files: Vec<(String, SourceFile)>,
files: &[(String, SourceFile)],
connectors: ConnectorRegistry<'_>,
) -> Result<(Files, Configuration), (Files, diagnostics::Diagnostics)> {
let mut diagnostics = Diagnostics::default();
Expand Down
2 changes: 1 addition & 1 deletion psl/psl/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ pub fn parse_configuration(schema: &str) -> Result<Configuration, Diagnostics> {
/// Parses and validates Prisma schemas, but skip analyzing everything except datasource and generator
/// blocks.
pub fn parse_configuration_multi_file(
files: Vec<(String, SourceFile)>,
files: &[(String, SourceFile)],
) -> Result<(Files, Configuration), (Files, Diagnostics)> {
psl_core::parse_configuration_multi_file(files, builtin_connectors::BUILTIN_CONNECTORS)
}
Expand Down

0 comments on commit 5bc901e

Please sign in to comment.