Merge pull request #1065 from hash-org/fix-980
Fix import resolution logic
feds01 authored May 30, 2024
2 parents 22403a0 + 5ce4752 commit 23a7634
Showing 10 changed files with 41 additions and 28 deletions.
4 changes: 2 additions & 2 deletions compiler/hash-ast-desugaring/src/lib.rs
@@ -82,8 +82,8 @@ impl<Ctx: AstDesugaringCtxQuery> CompilerStage<Ctx> for AstDesugaringPass {

         // Iterate over all of the modules and add the expressions
         // to the queue so it can be distributed over the threads
-        for (id, module) in node_map.iter_mut_modules().enumerate() {
-            let source_id = SourceId::new_module(id as u32);
+        for (id, module) in node_map.iter_mut_modules() {
+            let source_id = SourceId::from(*id);
             let stage_info = source_stage_info.get(source_id);

             // Skip any modules that have already been de-sugared
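This hunk (and the matching ones in hash-ast-expand and hash-untyped-semantics further down) stops reconstructing a SourceId from an enumeration index and instead uses the ModuleId key that the iterator now yields. Below is a minimal standalone sketch of why the index-based scheme is fragile once modules are stored keyed by a reserved id rather than pushed in parse order; the types are toy stand-ins, not the compiler's own.

use std::collections::HashMap;

// Toy stand-in for ModuleId; the real type lives in hash_source.
type ModuleId = u32;

fn main() {
    let mut modules: HashMap<ModuleId, &str> = HashMap::new();
    // Modules finish parsing out of order, so they are inserted by their
    // reserved id rather than pushed sequentially.
    modules.insert(2, "c.hash");
    modules.insert(0, "a.hash");
    modules.insert(1, "b.hash");

    // Pre-fix pattern: enumerate() yields 0, 1, 2 in *iteration* order, which a
    // HashMap does not tie to the reserved ids, so the index can name the wrong module.
    for (index, (id, path)) in modules.iter().enumerate() {
        if index as u32 != *id {
            println!("index {index} would mislabel {path} (real id {id})");
        }
    }

    // Post-fix pattern: take the id straight from the iterator entry.
    for (id, path) in modules.iter() {
        println!("module {id} -> {path}");
    }
}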
4 changes: 2 additions & 2 deletions compiler/hash-ast-expand/src/lib.rs
@@ -88,8 +88,8 @@ impl<Ctx: AstExpansionCtxQuery> CompilerStage<Ctx> for AstExpansionPass {
             expander.emit_diagnostics_to(&sender);
         }

-        for (id, module) in node_map.iter_modules().enumerate() {
-            let source_id = SourceId::new_module(id as u32);
+        for (id, module) in node_map.iter_modules() {
+            let source_id = SourceId::from(*id);
             let stage_info = source_stage_info.get(source_id);

             // Skip any modules that have already been de-sugared
8 changes: 7 additions & 1 deletion compiler/hash-ast-utils/src/tree.rs
@@ -276,7 +276,13 @@ impl AstVisitor for AstTreePrinter {
         &self,
         node: ast::AstNodeRef<ast::Import>,
     ) -> Result<Self::ImportRet, Self::Error> {
-        Ok(TreeNode::leaf(labelled("import", node.path.to_str(), "\"")))
+        Ok(TreeNode::branch(
+            "import",
+            vec![
+                TreeNode::leaf(labelled("import", node.path.to_str(), "\"")),
+                TreeNode::leaf(labelled("source", format!("{:?}", node.source), "")),
+            ],
+        ))
     }

     type ImportExprRet = TreeNode;
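With this change the AST tree printer renders an import as a branch holding both the quoted path and the resolved source, rather than a single path leaf. A toy sketch of that shape using a hypothetical Node enum; the real TreeNode and labelled helpers in hash-ast-utils have a different API, and the printed values here are placeholders.

// Toy analogue of the printer's output shape: one "import" leaf becomes a
// branch carrying the path leaf plus a leaf for the resolved source.
#[derive(Debug)]
enum Node {
    Leaf(String),
    Branch(&'static str, Vec<Node>),
}

fn main() {
    let import = Node::Branch(
        "import",
        vec![
            Node::Leaf(r#"import "lib.hash""#.to_string()),
            Node::Leaf("source <resolved source id>".to_string()),
        ],
    );
    println!("{import:#?}");
}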
26 changes: 16 additions & 10 deletions compiler/hash-ast/src/node_map.rs
@@ -2,8 +2,11 @@
 //! This is used to ensure that the same node is not parsed and to retrieve
 //! nodes in later compilation stages.
 use std::{
+    collections::{
+        hash_map::{Iter, IterMut},
+        HashMap,
+    },
     path::{Path, PathBuf},
-    slice::{Iter, IterMut},
 };

 use hash_source::{InteractiveId, ModuleId, SourceId};
@@ -112,8 +115,11 @@ pub enum SourceRef<'i> {
 /// parsed within the current [Workspace].
 #[derive(Debug, Default)]
 pub struct NodeMap {
-    /// All [Module] nodes that have been parsed.
-    modules: IndexVec<ModuleId, ModuleEntry>,
+    /// All [Module] nodes that have been parsed. We use a [HashMap] here since
+    /// the order of the parsed modules will not be guaranteed, hence we
+    /// have to insert by the [ModuleId] that is reserved during parsing.
+    modules: HashMap<ModuleId, ModuleEntry>,
+
     /// All [InteractiveBlock] nodes that have been parsed.
     interactive_blocks: IndexVec<InteractiveId, InteractiveBlock>,
 }
@@ -133,7 +139,7 @@ impl NodeMap {
 impl NodeMap {
     /// Create a new [NodeMap]
     pub fn new() -> Self {
-        Self { modules: index_vec![], interactive_blocks: index_vec![] }
+        Self { modules: HashMap::new(), interactive_blocks: index_vec![] }
     }

     /// Add a [InteractiveBlock] to the [NodeMap]
@@ -142,8 +148,8 @@ impl NodeMap {
     }

     /// Add a [Module] to the [NodeMap]
-    pub fn add_module(&mut self, module: ModuleEntry) {
-        self.modules.push(module);
+    pub fn add_module(&mut self, id: ModuleId, module: ModuleEntry) {
+        self.modules.insert(id, module);
     }

     /// Get a [SourceRef] by [SourceId].
@@ -170,22 +176,22 @@
     /// Get a reference to a [Module], panics if the [SourceId]
     /// has no backing [Module].
     pub fn get_module(&self, id: ModuleId) -> &ModuleEntry {
-        self.modules.get(id).unwrap()
+        self.modules.get(&id).unwrap()
     }

     /// Get a mutable reference to a [Module], panics if the [SourceId]
     /// has no backing [Module].
     pub fn get_module_mut(&mut self, id: ModuleId) -> &mut ModuleEntry {
-        self.modules.get_mut(id).unwrap()
+        self.modules.get_mut(&id).unwrap()
     }

     /// Create an [Iter] over the currently stored modules within the [NodeMap].
-    pub fn iter_modules(&self) -> Iter<ModuleEntry> {
+    pub fn iter_modules(&self) -> Iter<ModuleId, ModuleEntry> {
         self.modules.iter()
     }

     /// Create an [IterMut] over the currently stored modules within the [NodeMap].
-    pub fn iter_mut_modules(&mut self) -> IterMut<ModuleEntry> {
+    pub fn iter_mut_modules(&mut self) -> IterMut<ModuleId, ModuleEntry> {
         self.modules.iter_mut()
     }
 }
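The core of the fix: NodeMap now stores modules in a HashMap keyed by the ModuleId reserved when the parse job is queued, so results arriving out of order (for example from parser worker threads) can no longer shift which id a module ends up under. Below is a simplified, hypothetical mirror of that storage pattern; the real ModuleEntry and ModuleId types live in hash-ast and hash-source and carry more data.

use std::collections::HashMap;

// Hypothetical, simplified stand-ins for the real types.
type ModuleId = u32;

#[derive(Debug)]
struct ModuleEntry {
    path: String,
}

#[derive(Default)]
struct NodeMap {
    modules: HashMap<ModuleId, ModuleEntry>,
}

impl NodeMap {
    // Insert by the id that was reserved when the parse job was queued,
    // so completion order does not matter.
    fn add_module(&mut self, id: ModuleId, module: ModuleEntry) {
        self.modules.insert(id, module);
    }

    fn get_module(&self, id: ModuleId) -> &ModuleEntry {
        self.modules.get(&id).unwrap()
    }
}

fn main() {
    let mut map = NodeMap::default();
    // Parse results arrive out of order (e.g. from worker threads).
    map.add_module(1, ModuleEntry { path: "b.hash".into() });
    map.add_module(0, ModuleEntry { path: "a.hash".into() });

    // Lookup is by reserved id, independent of insertion order.
    assert_eq!(map.get_module(0).path, "a.hash");
    for (id, module) in map.modules.iter() {
        println!("{id}: {}", module.path);
    }
}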
2 changes: 1 addition & 1 deletion compiler/hash-parser/src/lib.rs
@@ -109,7 +109,7 @@ impl<Ctx: ParserCtxQuery> CompilerStage<Ctx> for Parser {
                     self.merge_metrics(timings);

                     let path = SourceMapUtils::map(id, |source| source.path().to_path_buf());
-                    node_map.add_module(ModuleEntry::new(path, node));
+                    node_map.add_module(id, ModuleEntry::new(path, node));
                 }
                 ParserAction::MergeSpans { spans } => scope.spawn(move |_| {
                     SpanMap::add_local_map(spans);
2 changes: 1 addition & 1 deletion compiler/hash-pipeline/src/workspace.rs
@@ -318,7 +318,7 @@ impl Workspace {
             dump_ast(node.into(), mode, character_set, writer)?;
         }

-        for module in self.node_map.iter_modules() {
+        for (_, module) in self.node_map.iter_modules() {
             dump_ast(module.node_ref().into(), mode, character_set, writer)?;
         }

13 changes: 7 additions & 6 deletions compiler/hash-semantics/src/passes/discovery/mod.rs
@@ -30,9 +30,6 @@ pub struct DiscoveryPass<'env, E: SemanticEnv> {
     /// currently inside.
     def_state: DefDiscoveryState,

-    /// The current source being discovered.
-    source: SourceId,
-
     /// The AST info for the current analysis session.
     ast_info: &'env AstInfo,
 }
@@ -53,7 +50,12 @@ impl<E: SemanticEnv> AnalysisPass for DiscoveryPass<'_, E> {
         self.visit_body_block(node)
     }

-    fn pass_module(&self, _: SourceId, node: ast::AstNodeRef<ast::Module>) -> SemanticResult<()> {
+    fn pass_module(
+        &self,
+        source: SourceId,
+        node: ast::AstNodeRef<ast::Module>,
+    ) -> SemanticResult<()> {
+        debug_assert_eq!(source, node.span().id);
         self.visit_module(node)
     }

@@ -68,13 +70,12 @@ impl<E: SemanticEnv> AnalysisPass for DiscoveryPass<'_, E> {
 }

 impl<'env, E: SemanticEnv> DiscoveryPass<'env, E> {
-    pub fn new(env: &'env E, ast_info: &'env AstInfo, source: SourceId) -> Self {
+    pub fn new(env: &'env E, ast_info: &'env AstInfo) -> Self {
         Self {
             env,
             name_hint: LightState::new(None),
             def_state: DefDiscoveryState::new(),
             ast_info,
-            source,
         }
     }

4 changes: 2 additions & 2 deletions compiler/hash-semantics/src/passes/discovery/visitor.rs
@@ -140,7 +140,7 @@ impl<E: SemanticEnv> ast::AstVisitor for DiscoveryPass<'_, E> {
         &self,
         node: ast::AstNodeRef<ast::Module>,
     ) -> Result<Self::ModuleRet, Self::Error> {
-        let mod_def_id = self.create_or_get_module_mod_def(self.source.into());
+        let mod_def_id = self.create_or_get_module_mod_def(node.span().id.into());

         // Traverse the module
         self.enter_def(node, mod_def_id, || walk::walk_module(self, node))?;
@@ -448,7 +448,7 @@ impl<E: SemanticEnv> ast::AstVisitor for DiscoveryPass<'_, E> {

     type ImportRet = ();
     fn visit_import(&self, node: AstNodeRef<ast::Import>) -> Result<Self::ImportRet, Self::Error> {
-        DiscoveryPass::new(self.env, self.ast_info, node.source).pass_source(node.source)?;
+        DiscoveryPass::new(self.env, self.ast_info).pass_source(node.source)?;
         Ok(())
     }
 }
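DiscoveryPass no longer caches a SourceId field: the module visitor reads the id off the node's own span, and pass_module asserts that the id it is handed agrees with the node. A minimal sketch of that "derive the source from the node" pattern with stand-in types; only the debug_assert_eq! mirrors the real code, everything else is hypothetical.

// Stand-in types for the sketch; the real SourceId, spans, and visitor live
// in hash_source / hash-semantics.
type SourceId = u32;

struct Span {
    id: SourceId,
}

struct ModuleNode {
    span: Span,
}

struct DiscoveryPass;

impl DiscoveryPass {
    fn pass_module(&self, source: SourceId, node: &ModuleNode) {
        // Mirrors the debug_assert_eq! added in pass_module: the id carried by
        // the node itself is the single source of truth.
        debug_assert_eq!(source, node.span.id);
        self.visit_module(node);
    }

    fn visit_module(&self, node: &ModuleNode) {
        // The mod-def lookup now keys off the node's span id rather than a
        // SourceId stored on the pass.
        let _key = node.span.id;
    }
}

fn main() {
    let node = ModuleNode { span: Span { id: 7 } };
    DiscoveryPass.pass_module(7, &node);
}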
2 changes: 1 addition & 1 deletion compiler/hash-semantics/src/passes/mod.rs
@@ -30,7 +30,7 @@ impl<'env, E: SemanticEnv> Analyser<'env, E> {
         let ast_info = AstInfo::new();

         // Discover all definitions in the source.
-        DiscoveryPass::new(self.env, &ast_info, source).pass_source(source)?;
+        DiscoveryPass::new(self.env, &ast_info).pass_source(source)?;

         // Resolve all symbols in the source and create TIR terms.
         ResolutionPass::new(self.env, &ast_info).pass_source(source)?;
4 changes: 2 additions & 2 deletions compiler/hash-untyped-semantics/src/lib.rs
@@ -73,8 +73,8 @@ impl<Ctx: UntypedSemanticAnalysisCtxQuery> CompilerStage<Ctx> for UntypedSemanti

         // Iterate over all of the modules and add the expressions
         // to the queue so it can be distributed over the threads
-        for (id, module) in node_map.iter_modules().enumerate() {
-            let source_id = SourceId::new_module(id as u32);
+        for (id, module) in node_map.iter_modules() {
+            let source_id = SourceId::from(*id);
             let stage_info = source_stage_info.get(source_id);

             // Skip any modules that have already been checked
