diff --git a/sway-core/src/language/parsed/declaration/variable.rs b/sway-core/src/language/parsed/declaration/variable.rs index 35be63bb75a..cbd911adb52 100644 --- a/sway-core/src/language/parsed/declaration/variable.rs +++ b/sway-core/src/language/parsed/declaration/variable.rs @@ -1,4 +1,8 @@ -use crate::{language::parsed::Expression, Ident, TypeArgument}; +use std::fmt; + +use crate::{ + engine_threading::DebugWithEngines, language::parsed::Expression, Engines, Ident, TypeArgument, +}; #[derive(Debug, Clone)] pub struct VariableDeclaration { @@ -7,3 +11,9 @@ pub struct VariableDeclaration { pub body: Expression, // will be codeblock variant pub is_mutable: bool, } + +impl DebugWithEngines for VariableDeclaration { + fn fmt(&self, f: &mut fmt::Formatter<'_>, _engines: &Engines) -> fmt::Result { + write!(f, "{}", self.name) + } +} diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index f36a7c96365..062da4edec6 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -34,6 +34,7 @@ pub use debug_generation::write_dwarf; use indexmap::IndexMap; use metadata::MetadataManager; use query_engine::{ModuleCacheKey, ModulePath, ProgramsCacheEntry}; +use semantic_analysis::node_analysis::{NodeAnalysis, NodeAnalysisContext}; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; @@ -489,7 +490,7 @@ pub fn parsed_to_ast( let module_eval_order = modules_dep_graph.compute_order(handler)?; // Collect the program symbols. - let _collection_ctx = ty::TyProgram::collect( + let symbol_collection_ctx = ty::TyProgram::collect( handler, engines, parse_program, @@ -497,10 +498,14 @@ pub fn parsed_to_ast( &module_eval_order, )?; + let mut node_analysis_ctx = NodeAnalysisContext::new(engines, &symbol_collection_ctx); + let _ = parse_program.analyze(handler, &mut node_analysis_ctx); + // Type check the program. 
let typed_program_opt = ty::TyProgram::type_check( handler, engines, + &symbol_collection_ctx, parse_program, initial_namespace, package_name, diff --git a/sway-core/src/semantic_analysis.rs b/sway-core/src/semantic_analysis.rs index 2988080309e..e50fd73041e 100644 --- a/sway-core/src/semantic_analysis.rs +++ b/sway-core/src/semantic_analysis.rs @@ -5,6 +5,7 @@ pub(crate) mod coins_analysis; pub mod collection_context; mod module; pub mod namespace; +pub mod node_analysis; mod node_dependencies; mod program; mod type_check_analysis; diff --git a/sway-core/src/semantic_analysis/ast_node/declaration/declaration.rs b/sway-core/src/semantic_analysis/ast_node/declaration/declaration.rs index 3efa4b2906e..6b71382dec1 100644 --- a/sway-core/src/semantic_analysis/ast_node/declaration/declaration.rs +++ b/sway-core/src/semantic_analysis/ast_node/declaration/declaration.rs @@ -333,7 +333,7 @@ impl TyDecl { let impl_self = engines.pe().get_impl_self(&decl_id).as_ref().clone(); let span = impl_self.block_span.clone(); let impl_trait_decl = - match ty::TyImplTrait::type_check_impl_self(handler, ctx.by_ref(), impl_self) { + match ty::TyImplTrait::type_check_impl_self(handler, ctx.by_ref(), decl_id) { Ok(val) => val, Err(err) => return Ok(ty::TyDecl::ErrorRecovery(span, err)), }; diff --git a/sway-core/src/semantic_analysis/ast_node/declaration/impl_trait.rs b/sway-core/src/semantic_analysis/ast_node/declaration/impl_trait.rs index 174abc6254d..88abd071852 100644 --- a/sway-core/src/semantic_analysis/ast_node/declaration/impl_trait.rs +++ b/sway-core/src/semantic_analysis/ast_node/declaration/impl_trait.rs @@ -10,7 +10,7 @@ use sway_error::{ use sway_types::{Ident, Span, Spanned}; use crate::{ - decl_engine::*, + decl_engine::{parsed_id::ParsedDeclId, *}, engine_threading::*, language::{ parsed::*, @@ -19,8 +19,9 @@ use crate::{ }, namespace::{IsExtendingExistingImpl, IsImplSelf, TryInsertingTraitImplOnFailure}, semantic_analysis::{ - type_check_context::EnforceTypeArguments, AbiMode, ConstShadowingMode, - TyNodeDepGraphNodeId, TypeCheckAnalysis, TypeCheckAnalysisContext, TypeCheckContext, + node_analysis::{NodeAnalysis, NodeAnalysisContext, ParsedNodeDepGraphNodeId}, + type_check_context::EnforceTypeArguments, + AbiMode, ConstShadowingMode, TypeCheckAnalysis, TypeCheckAnalysisContext, TypeCheckContext, TypeCheckFinalization, TypeCheckFinalizationContext, }, type_system::*, @@ -277,8 +278,13 @@ impl TyImplTrait { pub(crate) fn type_check_impl_self( handler: &Handler, ctx: TypeCheckContext, - impl_self: ImplSelf, + decl_id: ParsedDeclId<ImplSelf>, ) -> Result<Self, ErrorEmitted> { + let type_engine = ctx.engines.te(); + let decl_engine = ctx.engines.de(); + let engines = ctx.engines(); + + let impl_self = engines.pe().get_impl_self(&decl_id).as_ref().clone(); let ImplSelf { impl_type_parameters, mut implementing_for, @@ -286,10 +292,6 @@ impl TyImplTrait { block_span, } = impl_self; - let type_engine = ctx.engines.te(); - let decl_engine = ctx.engines.de(); - let engines = ctx.engines(); - // create the namespace for the impl ctx.with_const_shadowing_mode(ConstShadowingMode::ItemStyle) .allow_functions() @@ -455,12 +457,29 @@ impl TyImplTrait { IsExtendingExistingImpl::No, )?; + let new_items = &impl_trait.items; + + // First lets perform a node analysis pass. + // This returns a vector with ordered indexes to the items in the order that they + // should be processed.
+ let ordered_node_indices_opt = ty::TyImplTrait::node_analyze_impl_self_items( + handler, + ctx.by_ref(), + &decl_id, + )?; + + // In case there was any issue processing the dependency graph, then lets just + // process them in the original order. + let ordered_node_indices: Vec<_> = match ordered_node_indices_opt { + Some(value) => value.iter().map(|n| n.index()).collect(), + None => (0..new_items.len()).collect(), + }; + // Now lets do a partial type check of the body of the functions (while deferring full // monomorphization of function applications). We will use this tree to perform type check // analysis (mainly dependency analysis), and re-type check the items ordered by dependency. let mut defer_ctx = ctx.by_ref().with_defer_monomorphization(); - let new_items = &impl_trait.items; for (item, new_item) in items.clone().into_iter().zip(new_items) { match (item, new_item) { (ImplItem::Fn(fn_decl_id), TyTraitItem::Fn(decl_ref)) => { @@ -488,29 +507,6 @@ impl TyImplTrait { } } - let impl_trait_decl = decl_engine.insert(impl_trait.clone()).into(); - - // First lets perform an analysis pass. - // This returns a vector with ordered indexes to the items in the order that they - // should be processed. - let ordered_node_indices_opt = - if let TyDecl::ImplTrait(impl_trait) = &impl_trait_decl { - ty::TyImplTrait::type_check_analyze_impl_self_items( - handler, - ctx.by_ref(), - impl_trait, - )? - } else { - unreachable!(); - }; - - // In case there was any issue processing the dependency graph, then lets just - // process them in the original order. - let ordered_node_indices: Vec<_> = match ordered_node_indices_opt { - Some(value) => value.iter().map(|n| n.index()).collect(), - None => (0..new_items.len()).collect(), - }; - // Now lets type check the body of the functions (for real this time). for idx in ordered_node_indices { match (&items[idx], &new_items[idx]) { @@ -575,18 +571,19 @@ impl TyImplTrait { }) } - pub(crate) fn type_check_analyze_impl_self_items( + pub(crate) fn node_analyze_impl_self_items( handler: &Handler, ctx: TypeCheckContext, - impl_self: &ty::ImplTrait, - ) -> Result<Option<Vec<TyNodeDepGraphNodeId>>, ErrorEmitted> { + impl_self: &ParsedDeclId<ImplSelf>, + ) -> Result<Option<Vec<ParsedNodeDepGraphNodeId>>, ErrorEmitted> { let engines = ctx.engines; + let symbol_collection_ctx = ctx.symbol_collection_ctx; handler.scope(|handler| { - let mut analysis_ctx = TypeCheckAnalysisContext::new(engines); - let _ = impl_self.type_check_analyze(handler, &mut analysis_ctx); + let mut analysis_ctx = NodeAnalysisContext::new(engines, symbol_collection_ctx); + let _ = impl_self.analyze(handler, &mut analysis_ctx); // Build a sub graph that just contains the items for this impl trait. - let impl_trait_node_index = analysis_ctx.nodes.get(&impl_self.decl_id.unique_id()); + let impl_trait_node_index = analysis_ctx.nodes.get(&impl_self.unique_id()); let sub_graph = analysis_ctx.get_sub_graph( *impl_trait_node_index.expect("expected a valid impl trait node id"), ); diff --git a/sway-core/src/semantic_analysis/module.rs b/sway-core/src/semantic_analysis/module.rs index b9ab1fb5bfe..b2364db2598 100644 --- a/sway-core/src/semantic_analysis/module.rs +++ b/sway-core/src/semantic_analysis/module.rs @@ -297,7 +297,6 @@ impl ty::TyModule { }) .collect::<Result<Vec<_>, _>>(); - // TODO: Ordering should be solved across all modules prior to the beginning of type-check.
let ordered_nodes = node_dependencies::order_ast_nodes_by_dependency( handler, ctx.engines(), diff --git a/sway-core/src/semantic_analysis/namespace/module.rs b/sway-core/src/semantic_analysis/namespace/module.rs index d3ba5777280..16cc03e6b42 100644 --- a/sway-core/src/semantic_analysis/namespace/module.rs +++ b/sway-core/src/semantic_analysis/namespace/module.rs @@ -6,7 +6,7 @@ use crate::{ ty::{self, TyTraitItem}, CallPath, Visibility, }, - semantic_analysis::*, + semantic_analysis::{collection_context::SymbolCollectionContext, *}, transform::to_parsed_lang, Ident, Namespace, TypeId, TypeInfo, }; @@ -166,7 +166,13 @@ impl Module { ns.root.module.name = ns_name; ns.root.module.is_external = true; ns.root.module.visibility = Visibility::Public; - let type_check_ctx = TypeCheckContext::from_namespace(&mut ns, engines, experimental); + let symbol_collection_ctx = SymbolCollectionContext::new(ns.clone()); + let type_check_ctx = TypeCheckContext::from_namespace( + &mut ns, + engines, + &symbol_collection_ctx, + experimental, + ); let typed_node = ty::TyAstNode::type_check(handler, type_check_ctx, ast_node).unwrap(); // get the decl out of the typed node: // we know as an invariant this must be a const decl, as we hardcoded a const decl in diff --git a/sway-core/src/semantic_analysis/node_analysis.rs b/sway-core/src/semantic_analysis/node_analysis.rs new file mode 100644 index 00000000000..9200e62f84b --- /dev/null +++ b/sway-core/src/semantic_analysis/node_analysis.rs @@ -0,0 +1,689 @@ +//! This module handles the process of iterating through the parsed AST and doing an analysis. +//! At the moment we compute a dependency graph between parsed nodes. + +use std::collections::{HashMap, HashSet}; +use std::fmt::Display; +use std::fs; + +use petgraph::stable_graph::NodeIndex; +use petgraph::Graph; +use sway_error::handler::{ErrorEmitted, Handler}; + +use crate::decl_engine::parsed_id::ParsedDeclId; +use crate::decl_engine::DeclUniqueId; +use crate::engine_threading::DebugWithEngines; +use crate::language::parsed::{ + AbiDeclaration, AstNode, AstNodeContent, CodeBlock, ConstantDeclaration, Declaration, + EnumDeclaration, Expression, ExpressionKind, FunctionDeclaration, ImplItem, ImplSelf, + ImplTrait, ParseModule, ParseProgram, StorageDeclaration, StructDeclaration, TraitDeclaration, + TraitTypeDeclaration, TypeAliasDeclaration, VariableDeclaration, +}; +use crate::Engines; + +use super::collection_context::SymbolCollectionContext; + +pub type ParsedNodeDepGraphNodeId = petgraph::graph::NodeIndex; + +#[derive(Clone, Debug)] +pub enum ParsedNodeDepGraphEdgeInfo { + FnApp, +} + +#[derive(Clone, Debug)] +pub struct ParsedNodeDepGraphEdge(pub ParsedNodeDepGraphEdgeInfo); + +impl Display for ParsedNodeDepGraphEdge { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.0 { + ParsedNodeDepGraphEdgeInfo::FnApp => write!(f, "fn app"), + } + } +} + +#[derive(Clone, Debug)] +pub enum ParsedNodeDepGraphNode { + ImplSelf { + decl_id: ParsedDeclId<ImplSelf>, + }, + ImplTrait { + decl_id: ParsedDeclId<ImplTrait>, + }, + ImplTraitItem { + item: ImplItem, + }, + Fn { + decl_id: ParsedDeclId<FunctionDeclaration>, + }, + Variable { + decl_id: ParsedDeclId<VariableDeclaration>, + }, +} + +// Represents an ordered graph between declaration id indexes. +pub type ParsedNodeDepGraph = + petgraph::graph::DiGraph<ParsedNodeDepGraphNode, ParsedNodeDepGraphEdge>; + +// A simple context that is used to pass context necessary for parsed AST analysis.
+pub struct NodeAnalysisContext<'cx> { + pub(crate) engines: &'cx Engines, + pub(crate) symbol_ctx: &'cx SymbolCollectionContext, + pub(crate) dep_graph: ParsedNodeDepGraph, + pub(crate) nodes: HashMap<DeclUniqueId, ParsedNodeDepGraphNodeId>, + pub(crate) items_node_stack: Vec<ParsedNodeDepGraphNodeId>, + pub(crate) node_stack: Vec<ParsedNodeDepGraphNodeId>, +} + +impl NodeAnalysisContext<'_> { + pub fn add_node(&mut self, node: ParsedNodeDepGraphNode) -> ParsedNodeDepGraphNodeId { + self.dep_graph.add_node(node) + } + + pub fn add_edge_from_current( + &mut self, + to: ParsedNodeDepGraphNodeId, + edge: ParsedNodeDepGraphEdge, + ) { + let from = *self.node_stack.last().unwrap(); + if !self.dep_graph.contains_edge(from, to) { + self.dep_graph.add_edge(from, to, edge); + } + } + + #[allow(clippy::map_entry)] + pub fn get_or_create_node_for_impl_item( + &mut self, + item: &ImplItem, + ) -> ParsedNodeDepGraphNodeId { + let id = match item { + ImplItem::Fn(decl_id) => decl_id.unique_id(), + ImplItem::Constant(decl_id) => decl_id.unique_id(), + ImplItem::Type(decl_id) => decl_id.unique_id(), + }; + if self.nodes.contains_key(&id) { + *self.nodes.get(&id).unwrap() + } else { + let item_node = + self.add_node(ParsedNodeDepGraphNode::ImplTraitItem { item: item.clone() }); + + self.nodes.insert(id, item_node); + item_node + } + } + + /// This function either gets an existing node in the graph, or creates a new + /// node corresponding to the passed function declaration node. + /// The function will try to find a non-monomorphized declaration node id so that + /// future accesses always normalize to the same node id. + #[allow(clippy::map_entry)] + pub fn get_or_create_node_for_fn_decl( + &mut self, + fn_decl_id: &ParsedDeclId<FunctionDeclaration>, + ) -> ParsedNodeDepGraphNodeId { + let id = fn_decl_id.unique_id(); + if self.nodes.contains_key(&id) { + *self.nodes.get(&id).unwrap() + } else { + let item_node = self.add_node(ParsedNodeDepGraphNode::Fn { + decl_id: *fn_decl_id, + }); + + self.nodes.insert(id, item_node); + item_node + } + } + + /// This function will process an impl self declaration, pushing graph nodes + /// corresponding to each item in the trait impl. + #[allow(clippy::map_entry)] + pub(crate) fn push_nodes_for_impl_self( + &mut self, + impl_self: &ParsedDeclId<ImplSelf>, + ) -> ParsedNodeDepGraphNodeId { + if self.nodes.contains_key(&impl_self.unique_id()) { + *self.nodes.get(&impl_self.unique_id()).unwrap() + } else { + let node = self.add_node(ParsedNodeDepGraphNode::ImplSelf { + decl_id: *impl_self, + }); + self.nodes.insert(impl_self.unique_id(), node); + + let decl_engine = self.engines.pe(); + let impl_self = decl_engine.get_impl_self(impl_self); + + for item in impl_self.items.iter() { + let item_node = self.get_or_create_node_for_impl_item(item); + + // Connect the item node to the impl trait node. + self.dep_graph.add_edge( + node, + item_node, + ParsedNodeDepGraphEdge(ParsedNodeDepGraphEdgeInfo::FnApp), + ); + + self.items_node_stack.push(item_node); + } + + node + } + } + + /// This function will process an impl trait declaration, pushing graph nodes + /// corresponding to each item in the trait impl.
+ #[allow(clippy::map_entry)] + pub(crate) fn push_nodes_for_impl_trait( + &mut self, + impl_trait: &ParsedDeclId<ImplTrait>, + ) -> ParsedNodeDepGraphNodeId { + if self.nodes.contains_key(&impl_trait.unique_id()) { + *self.nodes.get(&impl_trait.unique_id()).unwrap() + } else { + let node = self.add_node(ParsedNodeDepGraphNode::ImplTrait { + decl_id: *impl_trait, + }); + self.nodes.insert(impl_trait.unique_id(), node); + + let decl_engine = self.engines.pe(); + let impl_trait = decl_engine.get_impl_trait(impl_trait); + + for item in impl_trait.items.iter() { + let item_node = self.get_or_create_node_for_impl_item(item); + + // Connect the item node to the impl trait node. + self.dep_graph.add_edge( + node, + item_node, + ParsedNodeDepGraphEdge(ParsedNodeDepGraphEdgeInfo::FnApp), + ); + + self.items_node_stack.push(item_node); + } + + node + } + } + + /// This function will return an option to the node that represents the + /// function being referenced by a function application. + pub(crate) fn get_node_for_fn_decl( + &mut self, + fn_decl_id: &ParsedDeclId<FunctionDeclaration>, + ) -> Option<ParsedNodeDepGraphNodeId> { + if let Some(found) = self.nodes.get(&fn_decl_id.unique_id()) { + return Some(*found); + } + + for index in self.items_node_stack.iter().rev() { + let node = self + .dep_graph + .node_weight(*index) + .expect("expecting valid node id"); + + let item_fn_decl_id = match node { + ParsedNodeDepGraphNode::ImplTrait { decl_id: _ } => unreachable!(), + ParsedNodeDepGraphNode::ImplTraitItem { + item: ImplItem::Fn(decl_id), + } => *decl_id, + ParsedNodeDepGraphNode::Fn { + decl_id: fn_decl_id, + } => *fn_decl_id, + _ => continue, + }; + + if item_fn_decl_id.unique_id() == fn_decl_id.unique_id() { + return Some(*index); + } + } + + // If no node has been found yet, create it. + let node = self.get_or_create_node_for_fn_decl(fn_decl_id); + Some(node) + } + + /// Prints out GraphViz DOT format for the dependency graph.
+ #[allow(dead_code)] + pub(crate) fn visualize(&self, engines: &Engines, print_graph: Option<String>) { + if let Some(graph_path) = print_graph { + use petgraph::dot::{Config, Dot}; + let string_graph = self.dep_graph.filter_map( + |_idx, node| Some(format!("{:?}", engines.help_out(node))), + |_idx, edge| Some(format!("{}", edge)), + ); + + let output = format!( + "{:?}", + Dot::with_attr_getters( + &string_graph, + &[Config::NodeNoLabel, Config::EdgeNoLabel], + &|_, er| format!("label = {:?}", er.weight()), + &|_, nr| { + let _node = &self.dep_graph[nr.0]; + let shape = ""; + let url = "".to_string(); + format!("{shape} label = {:?} {url}", nr.1) + }, + ) + ); + + if graph_path.is_empty() { + tracing::info!("{output}"); + } else { + let result = fs::write(graph_path.clone(), output); + if let Some(error) = result.err() { + tracing::error!( + "There was an issue while outputting type check analysis graph to path {graph_path:?}\n{error}" + ); + } + } + } + } + + pub(crate) fn get_sub_graph( + &self, + node_index: NodeIndex, + ) -> Graph<&ParsedNodeDepGraphNode, &ParsedNodeDepGraphEdge> { + let neighbors: Vec<_> = self + .dep_graph + .neighbors_directed(node_index, petgraph::Direction::Outgoing) + .collect(); + let neighbors_set: HashSet<&NodeIndex> = HashSet::from_iter(neighbors.iter()); + self.dep_graph.filter_map( + |node_index, node| { + if neighbors_set.contains(&node_index) { + Some(node) + } else { + None + } + }, + |_edge_index, edge| Some(edge), + ) + } +} + +impl DebugWithEngines for ParsedNodeDepGraphNode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>, engines: &Engines) -> std::fmt::Result { + match self { + ParsedNodeDepGraphNode::ImplSelf { decl_id } => { + let node = engines.pe().get_impl_self(decl_id); + node.fmt(f, engines) + } + ParsedNodeDepGraphNode::ImplTrait { decl_id } => { + let node = engines.pe().get_impl_trait(decl_id); + node.fmt(f, engines) + } + ParsedNodeDepGraphNode::ImplTraitItem { item: node } => node.fmt(f, engines), + ParsedNodeDepGraphNode::Fn { decl_id } => { + let node = engines.pe().get_function(decl_id); + node.fmt(f, engines) + } + ParsedNodeDepGraphNode::Variable { decl_id } => { + let node = engines.pe().get_variable(decl_id); + node.fmt(f, engines) + } + } + } +} + +impl<'cx> NodeAnalysisContext<'cx> { + pub fn new(engines: &'cx Engines, symbol_ctx: &'cx SymbolCollectionContext) -> Self { + Self { + engines, + symbol_ctx, + dep_graph: Default::default(), + nodes: Default::default(), + items_node_stack: Default::default(), + node_stack: Default::default(), + } + } +} + +pub(crate) trait NodeAnalysis { + fn analyze(&self, handler: &Handler, ctx: &mut NodeAnalysisContext) + -> Result<(), ErrorEmitted>; +} + +impl NodeAnalysis for ParseProgram { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + self.root.analyze(handler, ctx) + } +} + +impl NodeAnalysis for ParseModule { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + for (_name, submodule) in &self.submodules { + let _ = submodule.module.analyze(handler, ctx); + } + for node in &self.tree.root_nodes { + let _ = node.analyze(handler, ctx); + } + Ok(()) + } +} + +impl NodeAnalysis for AstNode { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + self.content.analyze(handler, ctx) + } +} + +impl NodeAnalysis for AstNodeContent { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(),
ErrorEmitted> { + match self { + AstNodeContent::UseStatement(_) | AstNodeContent::IncludeStatement(_) => { + // Already handled by [`ModuleDepGraph`] + } + AstNodeContent::Declaration(node) => node.analyze(handler, ctx)?, + AstNodeContent::Expression(node) => node.analyze(handler, ctx)?, + AstNodeContent::Error(_, _) => {} + } + Ok(()) + } +} + +impl NodeAnalysis for Declaration { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + match self { + Declaration::VariableDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::FunctionDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::TraitDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::StructDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::EnumDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::ImplTrait(decl_id) => decl_id.analyze(handler, ctx), + Declaration::ImplSelf(decl_id) => decl_id.analyze(handler, ctx), + Declaration::AbiDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::ConstantDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::StorageDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::TypeAliasDeclaration(decl_id) => decl_id.analyze(handler, ctx), + Declaration::TraitTypeDeclaration(decl_id) => decl_id.analyze(handler, ctx), + } + } +} + +impl NodeAnalysis for Expression { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + self.kind.analyze(handler, ctx) + } +} + +impl NodeAnalysis for ExpressionKind { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + match self { + ExpressionKind::Error(_, _) => todo!(), + ExpressionKind::Literal(_lit) => Ok(()), + ExpressionKind::AmbiguousPathExpression(_) => todo!(), + ExpressionKind::FunctionApplication(_) => todo!(), + ExpressionKind::LazyOperator(_) => todo!(), + ExpressionKind::AmbiguousVariableExpression(_) => todo!(), + ExpressionKind::Variable(_) => todo!(), + ExpressionKind::Tuple(_) => todo!(), + ExpressionKind::TupleIndex(_) => todo!(), + ExpressionKind::Array(_) => todo!(), + ExpressionKind::Struct(_) => todo!(), + ExpressionKind::CodeBlock(block) => block.analyze(handler, ctx), + ExpressionKind::If(expr) => { + let _ = expr.condition.analyze(handler, ctx); + let _ = expr.then.analyze(handler, ctx); + if let Some(expr) = &expr.r#else { + let _ = expr.analyze(handler, ctx); + } + Ok(()) + } + ExpressionKind::Match(_) => todo!(), + ExpressionKind::Asm(_expr) => Ok(()), + ExpressionKind::MethodApplication(_) => todo!(), + ExpressionKind::Subfield(_) => todo!(), + ExpressionKind::DelineatedPath(_) => todo!(), + ExpressionKind::AbiCast(_) => todo!(), + ExpressionKind::ArrayIndex(_) => todo!(), + ExpressionKind::StorageAccess(_) => todo!(), + ExpressionKind::IntrinsicFunction(_) => Ok(()), + ExpressionKind::WhileLoop(_) => todo!(), + ExpressionKind::ForLoop(_) => todo!(), + ExpressionKind::Break => todo!(), + ExpressionKind::Continue => todo!(), + ExpressionKind::Reassignment(_) => todo!(), + ExpressionKind::ImplicitReturn(expr) => expr.analyze(handler, ctx), + ExpressionKind::Return(_) => todo!(), + ExpressionKind::Ref(_) => todo!(), + ExpressionKind::Deref(_) => todo!(), + } + } +} + +impl NodeAnalysis for ParsedDeclId<VariableDeclaration> { + fn analyze( + &self, + _handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + let id = self.unique_id();
+ if !ctx.nodes.contains_key(&id) { + let item_node = ctx.add_node(ParsedNodeDepGraphNode::Variable { decl_id: *self }); + ctx.nodes.insert(id, item_node); + } + Ok(()) + } +} + +impl NodeAnalysis for ParsedDeclId<FunctionDeclaration> { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + handler.scope(|handler| { + let node = ctx.get_node_for_fn_decl(self); + if let Some(node) = node { + ctx.node_stack.push(node); + + let fn_decl = ctx.engines.pe().get_function(self); + let _ = fn_decl.analyze(handler, ctx); + + ctx.node_stack.pop(); + } + Ok(()) + }) + } +} + +impl NodeAnalysis for FunctionDeclaration { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + handler.scope(|handler| self.body.analyze(handler, ctx)) + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + Ok(()) + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for ParsedDeclId<ImplTrait> { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + let parsed_decl_engine = ctx.engines.pe(); + let impl_trait = parsed_decl_engine.get_impl_trait(self); + + // Lets create a graph node for the impl trait and for every item in the trait. + ctx.push_nodes_for_impl_trait(self); + + // Now lets analyze each impl trait item. + for (i, item) in impl_trait.items.iter().enumerate() { + let _node = ctx.items_node_stack[i]; + item.analyze(handler, ctx)?; + } + + // Clear the work-in-progress node stack. + ctx.items_node_stack.clear(); + + Ok(()) + } +} + +impl NodeAnalysis for ParsedDeclId<ImplSelf> { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + let parsed_decl_engine = ctx.engines.pe(); + let impl_trait = parsed_decl_engine.get_impl_self(self); + + // Lets create a graph node for the impl self and for every item in the trait. + ctx.push_nodes_for_impl_self(self); + + // Now lets analyze each impl trait item. + for (i, item) in impl_trait.items.iter().enumerate() { + let _node = ctx.items_node_stack[i]; + item.analyze(handler, ctx)?; + } + + // Clear the work-in-progress node stack.
+ ctx.items_node_stack.clear(); + + Ok(()) + } +} + +impl NodeAnalysis for ImplItem { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + match self { + ImplItem::Fn(node) => { + node.analyze(handler, ctx)?; + } + ImplItem::Constant(node) => { + node.analyze(handler, ctx)?; + } + ImplItem::Type(node) => { + node.analyze(handler, ctx)?; + } + } + + Ok(()) + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for ParsedDeclId { + fn analyze( + &self, + _handler: &Handler, + _ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + todo!() + } +} + +impl NodeAnalysis for CodeBlock { + fn analyze( + &self, + handler: &Handler, + ctx: &mut NodeAnalysisContext, + ) -> Result<(), ErrorEmitted> { + for node in self.contents.iter() { + node.analyze(handler, ctx)?; + } + Ok(()) + } +} diff --git a/sway-core/src/semantic_analysis/program.rs b/sway-core/src/semantic_analysis/program.rs index 7c0cff92d76..6d2f50e4fb6 100644 --- a/sway-core/src/semantic_analysis/program.rs +++ b/sway-core/src/semantic_analysis/program.rs @@ -42,9 +42,11 @@ impl TyProgram { /// /// The given `initial_namespace` acts as an initial state for each module within this program. /// It should contain a submodule for each library package dependency. + #[allow(clippy::too_many_arguments)] pub fn type_check( handler: &Handler, engines: &Engines, + symbol_collection_ctx: &SymbolCollectionContext, parsed: &ParseProgram, initial_namespace: namespace::Root, package_name: &str, @@ -54,8 +56,13 @@ impl TyProgram { let experimental = build_config.map(|x| x.experimental).unwrap_or_default(); let mut namespace = Namespace::init_root(initial_namespace); - let mut ctx = TypeCheckContext::from_root(&mut namespace, engines, experimental) - .with_kind(parsed.kind); + let mut ctx = TypeCheckContext::from_root( + &mut namespace, + engines, + symbol_collection_ctx, + experimental, + ) + .with_kind(parsed.kind); let ParseProgram { root, kind } = parsed; diff --git a/sway-core/src/semantic_analysis/type_check_context.rs b/sway-core/src/semantic_analysis/type_check_context.rs index 603cc66f1f0..0ec7f4a1330 100644 --- a/sway-core/src/semantic_analysis/type_check_context.rs +++ b/sway-core/src/semantic_analysis/type_check_context.rs @@ -24,7 +24,7 @@ use sway_error::{ use sway_types::{span::Span, Ident, Spanned}; use sway_utils::iter_prefixes; -use super::GenericShadowingMode; +use super::{collection_context::SymbolCollectionContext, GenericShadowingMode}; /// Contextual state tracked and accumulated throughout type-checking. pub struct TypeCheckContext<'a> { @@ -40,6 +40,8 @@ pub struct TypeCheckContext<'a> { pub(crate) engines: &'a Engines, + pub(crate) symbol_collection_ctx: &'a SymbolCollectionContext, + // The following set of fields are intentionally private. 
When a `TypeCheckContext` is passed // into a new node during type checking, these fields should be updated using the `with_*` // methods which provides a new `TypeCheckContext`, ensuring we don't leak our changes into @@ -107,11 +109,13 @@ impl<'a> TypeCheckContext<'a> { pub fn from_namespace( namespace: &'a mut Namespace, engines: &'a Engines, + symbol_collection_ctx: &'a SymbolCollectionContext, experimental: ExperimentalFlags, ) -> Self { Self { namespace, engines, + symbol_collection_ctx, type_annotation: engines.te().insert(engines, TypeInfo::Unknown, None), function_type_annotation: engines.te().insert(engines, TypeInfo::Unknown, None), unify_generic: false, @@ -141,19 +145,22 @@ impl<'a> TypeCheckContext<'a> { pub fn from_root( root_namespace: &'a mut Namespace, engines: &'a Engines, + symbol_collection_ctx: &'a SymbolCollectionContext, experimental: ExperimentalFlags, ) -> Self { - Self::from_module_namespace(root_namespace, engines, experimental) + Self::from_module_namespace(root_namespace, engines, symbol_collection_ctx, experimental) } fn from_module_namespace( namespace: &'a mut Namespace, engines: &'a Engines, + symbol_collection_ctx: &'a SymbolCollectionContext, experimental: ExperimentalFlags, ) -> Self { Self { namespace, engines, + symbol_collection_ctx, type_annotation: engines.te().insert(engines, TypeInfo::Unknown, None), function_type_annotation: engines.te().insert(engines, TypeInfo::Unknown, None), unify_generic: false, @@ -184,6 +191,7 @@ impl<'a> TypeCheckContext<'a> { TypeCheckContext { namespace: self.namespace, type_annotation: self.type_annotation, + symbol_collection_ctx: self.symbol_collection_ctx, function_type_annotation: self.function_type_annotation, unify_generic: self.unify_generic, self_type: self.self_type, @@ -211,6 +219,7 @@ impl<'a> TypeCheckContext<'a> { let ctx = TypeCheckContext { namespace: &mut namespace, type_annotation: self.type_annotation, + symbol_collection_ctx: self.symbol_collection_ctx, function_type_annotation: self.function_type_annotation, unify_generic: self.unify_generic, self_type: self.self_type, @@ -246,10 +255,16 @@ impl<'a> TypeCheckContext<'a> { // We're checking a submodule, so no need to pass through anything other than the // namespace and the engines. let engines = self.engines; + let symbol_collection_ctx = self.symbol_collection_ctx; let mut submod_ns = self .namespace_mut() .enter_submodule(mod_name, visibility, module_span); - let submod_ctx = TypeCheckContext::from_namespace(&mut submod_ns, engines, experimental); + let submod_ctx = TypeCheckContext::from_namespace( + &mut submod_ns, + engines, + symbol_collection_ctx, + experimental, + ); with_submod_ctx(submod_ctx) }
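
Editor's note (illustrative sketch, not part of the diff above): the new node_analysis pass builds a petgraph directed graph keyed by parsed declaration ids, extracts a sub-graph for the items of an impl block, and uses it to decide the order in which those items are type checked, falling back to the original source order when no ordering can be produced (see `ordered_node_indices` in `type_check_impl_self`). The standalone Rust sketch below shows that order-or-fallback pattern using petgraph's topological sort; the names `Item` and `build_order` are hypothetical and do not exist in sway-core.

// Hypothetical, self-contained sketch of the "order by dependency, else fall back
// to source order" pattern; it is not the sway-core implementation.
use petgraph::algo::toposort;
use petgraph::graph::{DiGraph, NodeIndex};

struct Item(&'static str);

fn build_order(items: &[Item], deps: &[(usize, usize)]) -> Vec<usize> {
    // Build a dependency graph: an edge `a -> b` means "a depends on b".
    let mut graph: DiGraph<usize, ()> = DiGraph::new();
    let nodes: Vec<NodeIndex> = (0..items.len()).map(|i| graph.add_node(i)).collect();
    for &(from, to) in deps {
        graph.add_edge(nodes[from], nodes[to], ());
    }
    // Topologically sort; on a cycle (or any failure) fall back to the original
    // item order, mirroring the `ordered_node_indices` fallback in the diff.
    match toposort(&graph, None) {
        // Reverse so that dependencies come before their dependents.
        Ok(order) => order.into_iter().rev().map(|n| graph[n]).collect(),
        Err(_) => (0..items.len()).collect(),
    }
}

fn main() {
    let items = [Item("a"), Item("b"), Item("c")];
    // `a` depends on `b`, and `b` depends on `c`, so `c` should be processed first.
    let order = build_order(&items, &[(0, 1), (1, 2)]);
    let names: Vec<&str> = order.iter().map(|&i| items[i].0).collect();
    println!("{names:?}"); // ["c", "b", "a"]
}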