fix: use the same Resolver everywhere (#253)
### Description

Make the whole pipeline share a single `Resolver`. `Bundler` now owns a `SharedResolver<T>` created once in `with_plugins_and_fs`, and `ScanStage` and `ModuleLoader` receive it through their constructors (via `Arc::clone`) instead of each building their own with `Resolver::with_cwd_and_fs`. As part of this, `ScanStage` also stores the file system, so `scan()` no longer takes an `fs` argument, and the `// TODO: should use a unified resolver` note in `build_inner` goes away.
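
For orientation, here is a minimal, self-contained sketch of the sharing pattern this change adopts. The type names below are hypothetical stand-ins rather than rolldown's actual definitions, and `SharedResolver` is assumed to be an `Arc` alias, as the `Arc::clone(&self.resolver)` calls in the diff suggest:

```rust
use std::sync::Arc;

// Hypothetical stand-ins for the real types; only the sharing pattern matters here.
struct Resolver {
    cwd: String,
}

impl Resolver {
    fn new(cwd: String) -> Self {
        Self { cwd }
    }

    fn resolve(&self, specifier: &str) -> String {
        format!("{}/{}", self.cwd, specifier)
    }
}

// Assumed shape of the alias: one resolver behind an Arc, cloned cheaply per stage.
type SharedResolver = Arc<Resolver>;

struct ScanStage {
    resolver: SharedResolver,
}

struct ModuleLoader {
    resolver: SharedResolver,
}

fn main() {
    // Build the resolver once, where the bundler is constructed...
    let resolver: SharedResolver = Arc::new(Resolver::new("/project".to_string()));

    // ...and hand clones of the same Arc to every stage instead of letting each
    // stage construct its own resolver (which would duplicate any resolver state).
    let scan = ScanStage { resolver: Arc::clone(&resolver) };
    let loader = ModuleLoader { resolver: Arc::clone(&resolver) };

    assert_eq!(scan.resolver.resolve("a.js"), loader.resolver.resolve("a.js"));
}
```

The point is that every stage resolves against the same instance, so whatever state the resolver carries is shared rather than rebuilt per stage.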

---
hyf0 committed Nov 14, 2023
1 parent c23822f commit cbcee58
Showing 3 changed files with 40 additions and 27 deletions.
crates/rolldown/src/bundler/bundler.rs (28 changes: 17 additions & 11 deletions)

@@ -14,15 +14,16 @@ use crate::{
   bundler::{bundle::bundle::Bundle, stages::scan_stage::ScanStage},
   error::BatchedResult,
   plugin::plugin::BoxPlugin,
-  HookBuildEndArgs, InputOptions, OutputOptions,
+  HookBuildEndArgs, InputOptions, OutputOptions, SharedResolver,
 };
 
 type BuildResult<T> = Result<T, Vec<BuildError>>;
 
-pub struct Bundler<T: FileSystem> {
+pub struct Bundler<T: FileSystem + Default> {
   input_options: InputOptions,
   plugin_driver: SharedPluginDriver,
   fs: T,
+  resolver: SharedResolver<T>,
 }
 
 impl Bundler<OsFileSystem> {
@@ -38,7 +39,12 @@ impl Bundler<OsFileSystem> {
 impl<T: FileSystem + Default + 'static> Bundler<T> {
   pub fn with_plugins_and_fs(input_options: InputOptions, plugins: Vec<BoxPlugin>, fs: T) -> Self {
     // rolldown_tracing::enable_tracing_on_demand();
-    Self { input_options, plugin_driver: Arc::new(PluginDriver::new(plugins)), fs }
+    Self {
+      resolver: Resolver::with_cwd_and_fs(input_options.cwd.clone(), false, fs.share()).into(),
+      plugin_driver: Arc::new(PluginDriver::new(plugins)),
+      input_options,
+      fs,
+    }
   }
 
   pub async fn write(&mut self, output_options: OutputOptions) -> BuildResult<Vec<Output>> {
@@ -95,14 +101,14 @@ impl<T: FileSystem + Default + 'static> Bundler<T> {
   }
 
   async fn build_inner(&mut self) -> BatchedResult<Graph> {
-    // TODO: should use a unified resolver
-    let resolver =
-      Arc::new(Resolver::with_cwd_and_fs(self.input_options.cwd.clone(), false, self.fs.share()));
-
-    let build_info =
-      ScanStage::new(&self.input_options, Arc::clone(&self.plugin_driver), Arc::clone(&resolver))
-        .scan(self.fs.share())
-        .await?;
+    let build_info = ScanStage::new(
+      &self.input_options,
+      Arc::clone(&self.plugin_driver),
+      self.fs.share(),
+      Arc::clone(&self.resolver),
+    )
+    .scan()
+    .await?;
 
     let mut graph = Graph::new(build_info);
     graph.link()?;
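
One small detail in the new `Self { .. }` initializer above: fields in a Rust struct literal are evaluated in the order they are written, which is presumably why `resolver` (which clones `input_options.cwd` and calls `fs.share()`) is listed before `input_options` and `fs` are moved into the struct. A tiny illustration with made-up types:

```rust
struct Options {
    cwd: String,
}

struct Resolver {
    cwd: String,
}

struct Bundler {
    resolver: Resolver,
    options: Options,
}

impl Bundler {
    fn new(options: Options) -> Self {
        Self {
            // Evaluated first: reads `options.cwd` and clones it while
            // `options` is still fully available.
            resolver: Resolver { cwd: options.cwd.clone() },
            // Evaluated second: only now is `options` moved into the struct.
            // Swapping the two field initializers would be a use-after-move error.
            options,
        }
    }
}

fn main() {
    let bundler = Bundler::new(Options { cwd: "/project".to_string() });
    assert_eq!(bundler.resolver.cwd, bundler.options.cwd);
}
```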
crates/rolldown/src/bundler/module_loader/module_loader.rs (10 changes: 7 additions & 3 deletions)

@@ -1,7 +1,6 @@
 use index_vec::IndexVec;
 use rolldown_common::{ImportKind, ModuleId, RawPath, ResourceId};
 use rolldown_fs::FileSystem;
-use rolldown_resolver::Resolver;
 use rustc_hash::{FxHashMap, FxHashSet};
 
 use super::normal_module_task::NormalModuleTask;
@@ -103,13 +102,18 @@ impl ModuleLoaderContext {
   }
 
 impl<'a, T: FileSystem + 'static + Default> ModuleLoader<'a, T> {
-  pub fn new(input_options: &'a InputOptions, plugin_driver: SharedPluginDriver, fs: T) -> Self {
+  pub fn new(
+    input_options: &'a InputOptions,
+    plugin_driver: SharedPluginDriver,
+    fs: T,
+    resolver: SharedResolver<T>,
+  ) -> Self {
     let (tx, rx) = tokio::sync::mpsc::unbounded_channel::<Msg>();
     Self {
       tx,
       rx,
       input_options,
-      resolver: Resolver::with_cwd_and_fs(input_options.cwd.clone(), false, fs.share()).into(),
+      resolver,
       fs,
       ctx: ModuleLoaderContext::default(),
       plugin_driver,
crates/rolldown/src/bundler/stages/scan_stage.rs (29 changes: 16 additions & 13 deletions)

@@ -18,10 +18,11 @@ use crate::{
   HookResolveIdArgsOptions, InputOptions, SharedResolver,
 };
 
-pub struct ScanStage<'me, T: FileSystem + Default> {
+pub struct ScanStage<'me, Fs: FileSystem + Default> {
   input_options: &'me InputOptions,
   plugin_driver: SharedPluginDriver,
-  resolver: SharedResolver<T>,
+  fs: Fs,
+  resolver: SharedResolver<Fs>,
 }
 
 pub struct ScanStageOutput {
@@ -31,13 +32,14 @@ pub struct ScanStageOutput {
   pub runtime: Runtime,
 }
 
-impl<'me, T: FileSystem + Default> ScanStage<'me, T> {
+impl<'me, Fs: FileSystem + Default + 'static> ScanStage<'me, Fs> {
   pub fn new(
     input_options: &'me InputOptions,
     plugin_driver: SharedPluginDriver,
-    resolver: SharedResolver<T>,
+    fs: Fs,
+    resolver: SharedResolver<Fs>,
   ) -> Self {
-    Self { input_options, plugin_driver, resolver }
+    Self { input_options, plugin_driver, fs, resolver }
   }
 
   fn resolve_entries(&self) -> BatchedResult<Vec<(Option<String>, ResolvedRequestInfo)>> {
@@ -84,18 +86,19 @@ impl<'me, T: FileSystem + Default> ScanStage<'me, T> {
     }
   }
 
-  pub async fn scan<Fs: FileSystem + Default + 'static>(
-    &self,
-    fs: Fs,
-  ) -> BatchedResult<ScanStageOutput> {
+  pub async fn scan(&self) -> BatchedResult<ScanStageOutput> {
     assert!(!self.input_options.input.is_empty(), "You must supply options.input to rolldown");
 
     let resolved_entries = self.resolve_entries()?;
 
-    let (modules, runtime, symbols, entries) =
-      ModuleLoader::new(self.input_options, Arc::clone(&self.plugin_driver), fs)
-        .fetch_all_modules(&resolved_entries)
-        .await?;
+    let (modules, runtime, symbols, entries) = ModuleLoader::new(
+      self.input_options,
+      Arc::clone(&self.plugin_driver),
+      self.fs.share(),
+      Arc::clone(&self.resolver),
+    )
+    .fetch_all_modules(&resolved_entries)
+    .await?;
 
     Ok(ScanStageOutput { modules, entries, symbols, runtime })
   }
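
The `scan` signature change above is a common refactor: the `Fs` type parameter and the `fs` value move from the method onto the struct, so the caller supplies the file system once at construction time and `scan()` takes no arguments. A rough standalone sketch of that shape, using hypothetical types rather than rolldown's own:

```rust
trait FileSystem {
    fn read(&self, path: &str) -> String;
}

struct MemoryFs;

impl FileSystem for MemoryFs {
    fn read(&self, path: &str) -> String {
        format!("contents of {path}")
    }
}

// Before: the generic lived on the method, so every caller had to pass an fs in.
struct ScanStageBefore;

impl ScanStageBefore {
    fn scan<Fs: FileSystem>(&self, fs: Fs) -> String {
        fs.read("entry.js")
    }
}

// After: the generic lives on the struct; `scan` just uses `self.fs`.
struct ScanStageAfter<Fs: FileSystem> {
    fs: Fs,
}

impl<Fs: FileSystem> ScanStageAfter<Fs> {
    fn new(fs: Fs) -> Self {
        Self { fs }
    }

    fn scan(&self) -> String {
        self.fs.read("entry.js")
    }
}

fn main() {
    let before = ScanStageBefore.scan(MemoryFs);
    let after = ScanStageAfter::new(MemoryFs).scan();
    assert_eq!(before, after);
}
```

With the generic on the struct, `scan` can combine `self.fs` with the other stored state (in the real code, the shared resolver) without threading extra parameters through every call.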
