Fix import resolution in imports

maurobalbi committed Nov 27, 2023
1 parent c202038 commit 4bdbca5
Showing 12 changed files with 176 additions and 59 deletions.
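For orientation, here is a minimal sketch (not part of the commit) of how the new `Import` HIR wrapper introduced below can be driven from an `ast::UnqualifiedImport` syntax node. It mirrors the `classify_name` arm added in `crates/ide/src/def/semantics.rs` and assumes it sits alongside that code, where `Semantics`, `Import`, `Definition` and the `ast` types are already in scope.

```rust
// Sketch only: resolve an unqualified import to the definition it refers to.
// Assumes this lives in crates/ide/src/def/semantics.rs next to classify_name,
// so Semantics, Import, Definition and the ast types are already imported.
fn resolve_unqualified_import(
    sema: &Semantics<'_>,
    node: &ast::UnqualifiedImport,
) -> Option<Definition> {
    // Map the syntax node back to its interned ImportId via the source map.
    let src = sema.find_file(node.syntax()).with_value(node).cloned();
    let map = sema.db.module_source_map(src.file_id);
    let import: Import = map.node_to_import(&src.value)?.into();
    // Resolve the imported name in the top-level scope of the module it comes
    // from (resolve_type for `type` imports, resolve_name otherwise).
    import.definition(sema.db.upcast())
}
```

This is what lets goto-definition jump from the alias in `import test.{type Test as Dodo}` to `pub type Test`, as exercised by the updated fixture in `crates/ide/src/ide/goto_definition.rs` below.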
66 changes: 63 additions & 3 deletions crates/ide/src/def/hir.rs
@@ -1,4 +1,4 @@
use std::{collections::HashMap, sync::Arc};
use std::{collections::HashMap, fs::File, sync::Arc};

use smol_str::SmolStr;
use syntax::ast::{self, AstNode, HasDocParts};
@@ -10,8 +10,12 @@ use crate::{
};

use super::{
hir_def::{AdtId, LocalFieldId, LocalVariantId, TypeAliasId, VariantId},
module::{AdtData, FieldData, FunctionData, Param, PatternId, TypeAliasData, VariantData},
hir_def::{AdtId, ImportId, LocalFieldId, LocalVariantId, TypeAliasId, VariantId},
module::{
AdtData, FieldData, FunctionData, ImportData, Param, PatternId, TypeAliasData, VariantData,
},
resolver::resolver_for_toplevel,
semantics::Definition,
source::HasSource,
FunctionId,
};
@@ -295,6 +299,62 @@ impl_from!(
for ModuleDef
);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Import {
pub(crate) id: ImportId,
}

impl Import {
pub fn imported_name(self, db: &dyn DefDatabase) -> SmolStr {
self.data(db).unqualified_name.clone()
}

pub fn is_type_import(self, db: &dyn DefDatabase) -> bool {
self.data(db).is_type_import
}

pub fn imported_alias(self, db: &dyn DefDatabase) -> Option<SmolStr> {
self.data(db).unqualified_as_name.clone()
}

pub fn definition(self, db: &dyn DefDatabase) -> Option<Definition> {
let module = self.imported_from_module(db)?;
let resolver = resolver_for_toplevel(db, module);
if self.is_type_import(db) {
resolver
.resolve_type(&self.imported_name(db))
.map(Into::into)
} else {
resolver
.resolve_name(&self.imported_name(db))
.map(Into::into)
}
}

pub fn import_from_module_name(self, db: &dyn DefDatabase) -> SmolStr {
let import = db.lookup_intern_import(self.id);
let module_idx = self.data(db).module;
db.module_items(import.file_id)[module_idx].accessor.clone()
}

pub fn imported_from_module(self, db: &dyn DefDatabase) -> Option<FileId> {
let import = db.lookup_intern_import(self.id);
let module_accessor = self.import_from_module_name(db);
let resolver = resolver_for_toplevel(db, import.file_id);
tracing::info!("MOUDLE MAP {:?}", resolver);
resolver.resolve_module(&module_accessor)
}

fn data(self, db: &dyn DefDatabase) -> ImportData {
let import = db.lookup_intern_import(self.id);
db.module_items(import.file_id)[import.value].clone()
}

pub fn module(&self, db: &dyn DefDatabase) -> Module {
db.lookup_intern_import(self.id).file_id.into()
}
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Local {
pub(crate) parent: FunctionId,
17 changes: 14 additions & 3 deletions crates/ide/src/def/hir_def.rs
@@ -3,8 +3,8 @@ use la_arena::Idx;
use crate::{impl_from, impl_intern, FileId, InFile};

use super::{
hir::{Adt, Field, Function, Module, TypeAlias, Variant},
module::{AdtData, FieldData, FunctionData, TypeAliasData, VariantData},
hir::{Adt, Field, Function, Module, TypeAlias, Variant, Import},
module::{AdtData, FieldData, FunctionData, TypeAliasData, VariantData, ImportData},
DefDatabase,
};
use crate::impl_intern_key;
@@ -28,7 +28,8 @@ from_id!(
(FunctionId, Function),
(AdtId, Adt),
(FileId, Module),
(TypeAliasId, TypeAlias)
(TypeAliasId, TypeAlias),
(ImportId, Import)
);

impl From<VariantId> for Variant {
@@ -59,6 +60,16 @@ impl_intern!(
lookup_intern_function
);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ImportId(pub salsa::InternId);
pub type ImportLoc = InFile<Idx<ImportData>>;
impl_intern!(
ImportId,
ImportLoc,
intern_import,
lookup_intern_import
);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AdtId(pub salsa::InternId);
pub type AdtLoc = InFile<Idx<AdtData>>;
12 changes: 10 additions & 2 deletions crates/ide/src/def/lower.rs
@@ -75,6 +75,14 @@ impl Index<Idx<AdtData>> for ModuleItemData {
}
}

impl Index<Idx<ImportData>> for ModuleItemData {
type Output = ImportData;

fn index(&self, index: Idx<ImportData>) -> &Self::Output {
&self.unqualified_imports[index]
}
}

impl Index<Idx<VariantData>> for ModuleItemData {
type Output = VariantData;

@@ -183,7 +191,7 @@ impl LowerCtx {
_ => (),
}
}
/// Here were resolving the imports and allocating the
/// Here were resolving the imports
fn lower_import(&mut self, i: &ast::Import) {
let ast_ptr = AstPtr::new(i);

@@ -220,7 +228,7 @@
unqualified.as_name().and_then(|t| t.text());

self.alloc_unqualified_import(ImportData {
type_: unqualified.is_type(),
is_type_import: unqualified.is_type(),
module: module_id,
unqualified_as_name,
unqualified_name,
5 changes: 4 additions & 1 deletion crates/ide/src/def/mod.rs
@@ -21,7 +21,7 @@ pub use semantics::{classify_node, find_container, Semantics};
pub use syntax::ast::{AstNode, BinaryOpKind as BinaryOp, Expr, UnaryOpKind as UnaryOp};

use self::body::{Body, BodySourceMap};
use self::hir_def::{AdtId, AdtLoc, FunctionId, FunctionLoc, TypeAliasId, TypeAliasLoc};
use self::hir_def::{AdtId, AdtLoc, FunctionId, FunctionLoc, TypeAliasId, TypeAliasLoc, ImportLoc, ImportId};
use self::lower::lower_module;
pub use self::lower::ModuleItemData;
use self::scope::{
@@ -35,6 +35,9 @@ pub trait InternDatabase: SourceDatabase {
#[salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;

#[salsa::interned]
fn intern_import(&self, loc: ImportLoc) -> ImportId;

#[salsa::interned]
fn intern_adt(&self, loc: AdtLoc) -> AdtId;

2 changes: 1 addition & 1 deletion crates/ide/src/def/module.rs
@@ -72,7 +72,7 @@ pub struct ModuleImport {

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ImportData {
pub type_: bool, // .{ type ... }
pub is_type_import: bool, // .{ type ... }
pub module: Idx<ModuleImport>, // e.g. import >>one/wobble<<

pub unqualified_as_name: Option<SmolStr>, // e.g. {* as >>AsName<<}
39 changes: 28 additions & 11 deletions crates/ide/src/def/scope.rs
@@ -12,7 +12,7 @@ use crate::{DefDatabase, FileId, ModuleMap};
use super::{
body::Body,
hir::Module,
hir_def::{AdtId, AdtLoc, FunctionLoc, ModuleDefId, TypeAliasId, TypeAliasLoc, VariantId},
hir_def::{AdtId, AdtLoc, FunctionLoc, ModuleDefId, TypeAliasId, TypeAliasLoc, VariantId, ImportId, ImportLoc},
module::{Clause, Expr, ExprId, ImportData, Pattern, PatternId, Statement, Visibility},
resolver::ResolveResult,
resolver_for_expr, FunctionId, ModuleItemData,
@@ -24,7 +24,6 @@ pub fn module_scope_with_map_query(
) -> (Arc<ModuleScope>, Arc<ModuleSourceMap>) {
let module_data = db.module_items(file_id);
let module_map = Module { id: file_id }.package(db).visible_modules(db);
tracing::info!("Module scope query {:?}", module_map);

let mut scope = ModuleScope::default();
let mut module_source_map = ModuleSourceMap::default();
@@ -38,12 +37,25 @@
scope.modules.insert(imported_module.accessor.clone(), file);
}

for (_, import) in module_data.unqualified_imports() {
for val in scope.resolve_import(db, &module_map, &module_data, import) {
for (import_id, import) in module_data.unqualified_imports() {
let import_loc = db.intern_import(ImportLoc {
file_id,
value: import_id,
});

module_source_map
.import_map
.insert(import.ast_ptr.clone(), import_loc);
for (is_type_import, val) in scope.resolve_import(db, &module_map, &module_data, import) {
match val {
ModuleDefId::AdtId(_) => scope.types.insert(import.local_name(), val),
ModuleDefId::TypeAliasId(_) => scope.types.insert(import.local_name(), val),
_ => scope.values.insert(import.local_name(), val),
ModuleDefId::AdtId(_) if is_type_import => {
scope.types.insert(import.local_name(), val)
}
ModuleDefId::TypeAliasId(_) if is_type_import => {
scope.types.insert(import.local_name(), val)
}
_ if !is_type_import => scope.values.insert(import.local_name(), val),
_ => None,
};
}
}
@@ -137,6 +149,7 @@ pub struct ModuleSourceMap {
adt_map: HashMap<AstPtr<ast::Adt>, AdtId>,
variant_map: HashMap<AstPtr<ast::Variant>, VariantId>,
type_alias_map: HashMap<AstPtr<ast::TypeAlias>, TypeAliasId>,
import_map: HashMap<AstPtr<ast::UnqualifiedImport>, ImportId>,
}

impl ModuleSourceMap {
@@ -145,6 +158,11 @@ impl ModuleSourceMap {
self.function_map.get(&src).copied()
}

pub fn node_to_import(&self, node: &ast::UnqualifiedImport) -> Option<ImportId> {
let src = AstPtr::new(node);
self.import_map.get(&src).copied()
}

pub fn node_to_adt(&self, node: &ast::Adt) -> Option<AdtId> {
let src = AstPtr::new(node);
self.adt_map.get(&src).copied()
@@ -209,11 +227,11 @@ impl ModuleScope {
module_map: &ModuleMap,
module_items: &ModuleItemData,
import: &ImportData,
) -> Vec<ModuleDefId> {
) -> Vec<(bool, ModuleDefId)> {
let ImportData {
unqualified_name: unqualifed_name,
module,
type_,
is_type_import: is_type,
..
} = import;
let module = &module_items[*module];
@@ -228,8 +246,7 @@
items
.iter()
.filter(|i| i.1 == Visibility::Public)
.map(|i| i.0.clone())
.clone()
.map(|i| (is_type.clone(), i.0.clone()))
.collect()
}
}
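To make the effect of the new `is_type_import` flag concrete, here is a small self-contained restatement of the routing added to `module_scope_with_map_query` above, using illustrative stand-in types rather than the crate's own: a `type` import only populates the type namespace, a plain import only the value namespace, and a mismatched `type` import binds nothing.

```rust
// Illustrative stand-ins; these are NOT types from the crate.
#[derive(Debug, PartialEq)]
enum DefKind {
    Adt,
    TypeAlias,
    Function,
}

#[derive(Debug, PartialEq)]
enum Namespace {
    Types,
    Values,
    None,
}

// Mirrors the match added above: the `type` keyword on the unqualified
// import decides which namespace the resolved name lands in.
fn route(is_type_import: bool, def: DefKind) -> Namespace {
    match def {
        DefKind::Adt | DefKind::TypeAlias if is_type_import => Namespace::Types,
        _ if !is_type_import => Namespace::Values,
        // A `type` import that resolves to a non-type definition binds nothing.
        _ => Namespace::None,
    }
}

fn main() {
    // import test.{type Wobble}: Wobble is only visible as a type.
    assert_eq!(route(true, DefKind::Adt), Namespace::Types);
    // import test.{main}: main is only visible as a value.
    assert_eq!(route(false, DefKind::Function), Namespace::Values);
    // A `type` import of something that is not a type binds nothing.
    assert_eq!(route(true, DefKind::Function), Namespace::None);
}
```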
31 changes: 29 additions & 2 deletions crates/ide/src/def/semantics.rs
@@ -14,7 +14,7 @@ use crate::{
};

use super::{
hir::{Adt, BuiltIn, Field, Function, Local, Module, TypeAlias, Variant},
hir::{Adt, BuiltIn, Field, Function, Import, Local, Module, TypeAlias, Variant},
hir_def::ModuleDefId,
resolver::{resolver_for_toplevel, ResolveResult},
source::HasSource,
@@ -213,6 +213,16 @@ fn classify_name(sema: &Semantics, name: &ast::Name) -> Option<Definition> {

return def
},
ast::UnqualifiedImport(it) => {
let src = sema
.find_file(it.syntax())
.with_value(&it)
.cloned();
let map = sema.db.module_source_map(src.file_id);
let import_id = map.node_to_import(&src.value);
let import: Import = import_id.map(From::from)?;
return import.definition(sema.db.upcast());
},
ast::VariantField(it) => {
let def = sema.to_def(&it).map(From::from);
return def;
@@ -227,7 +237,7 @@ fn classify_name(sema: &Semantics, name: &ast::Name) -> Option<Definition> {
fn classify_name_ref(sema: &Semantics, name_ref: &ast::NameRef) -> Option<Definition> {
let parent = name_ref.syntax().parent()?;

if let Some(expr) = ast::FieldAccessExpr::cast(parent) {
if let Some(expr) = ast::FieldAccessExpr::cast(parent.clone()) {
return sema.resolve_field(expr).map(Into::into);
}

@@ -236,6 +246,13 @@ fn classify_name_ref(sema: &Semantics, name_ref: &ast::NameRef) -> Option<Definition> {

fn classify_type_name(sema: &Semantics, type_name: &ast::TypeName) -> Option<Definition> {
let parent = type_name.syntax().parent()?;
if let Some(it) = ast::UnqualifiedImport::cast(parent.clone()) {
let src = sema.find_file(it.syntax()).with_value(&it).cloned();
let map = sema.db.module_source_map(src.file_id);
let import_id = map.node_to_import(&src.value);
let import: Import = import_id.map(From::from)?;
return import.definition(sema.db.upcast());
};

ast::TypeNameRef::cast(parent)
.and_then(|t| {
@@ -370,6 +387,16 @@ impl ToDef for ast::Function {
}
}

impl ToDef for ast::UnqualifiedImport {
type Def = Import;

fn to_def(sema: &Semantics<'_>, src: InFile<Self>) -> Option<Self::Def> {
let map = sema.db.module_source_map(src.file_id);
let import_id = map.node_to_import(&src.value);
import_id.map(From::from)
}
}

impl ToDef for ast::VariantField {
type Def = Field;

17 changes: 0 additions & 17 deletions crates/ide/src/def/source_analyzer.rs
@@ -78,23 +78,6 @@ impl SourceAnalyzer {
let expr_id = self.expr_id(expr)?;
self.infer.as_ref().and_then(|i| i.resolve_module(expr_id))
}

// pub(crate) fn resolve_field_access_expr(
// &self,
// db: &dyn DefDatabase,
// field_expr: &ast::FieldAccessExpr,
// ) -> Option<FunctionId> {
// let base_ty = self.type_of_expr(db, &field_expr.container()?)?;

// let (op_trait, op_fn) = self.lang_trait_fn(db, LangItem::Index, &name![index])?;
// // HACK: subst for all methods coincides with that for their trait because the methods
// // don't have any generic parameters, so we skip building another subst for the methods.
// let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
// .push(base_ty.clone())
// .push(index_ty.clone())
// .build();
// Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
// }
}

fn scope_for(
2 changes: 1 addition & 1 deletion crates/ide/src/ide/goto_definition.rs
@@ -280,7 +280,7 @@ fn test(test: Internal) { test.$0print }"#,
pub type Test
#- test2.gleam
import test.{Test as Dodo}
import test.{type Test as Dodo}
type Local = $0Dodo"#,
expect![
4 changes: 2 additions & 2 deletions crates/ide/src/ty/tests.rs
@@ -578,7 +578,7 @@ fn aliased_import() {
pub type Bla = String
#- test2.gleam
import test.{Bla, main as dodo}
import test.{type Bla, main as dodo}
fn test(a: String) -> Bla { $0 }"#,
expect!["test: fn(String) -> String"],
@@ -594,7 +594,7 @@ pub type Wobble(name) {
}
#- test2.gleam
import test.{Wobble as Bobo, main as dodo}
import test.{type Wobble as Bobo, main as dodo}
pub opaque type Nasty {
Nasty
}