chore: remove old treeshaking (#6549)
* chore: remove old treeshaking

* chore: remove builtin option
JSerFeng committed May 22, 2024
1 parent 043b081 commit 25edcdc
Showing 229 changed files with 291 additions and 4,020 deletions.
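In user-facing terms, this commit drops the opt-in flag for the new tree-shaking implementation and the legacy builtin switch that went with it. A minimal sketch of what existing configurations can delete, assuming the JavaScript option names mirror the RawRspackFuture.newTreeshaking and RawBuiltins fields removed below:

```ts
// rspack.config.ts — illustrative sketch only; option names are assumed to
// mirror the Raw* fields removed in this commit and may differ per version.
export default {
  experiments: {
    rspackFuture: {
      // newTreeshaking: true,  // removed: the new tree shaking is now the only code path
    },
  },
  // builtins: { treeShaking: true },  // removed together with the builtin option
  optimization: {
    usedExports: true,  // tree shaking is driven by the standard optimization options
    sideEffects: true,
  },
};
```

The commented-out entries are the flags that no longer exist; the optimization block shows where equivalent behavior now lives.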
3 changes: 1 addition & 2 deletions crates/node_binding/binding.d.ts
@@ -1134,7 +1134,6 @@ export interface RawOptions {
node?: RawNodeOption
profile: boolean
bail: boolean
builtins: RawBuiltins
}

export interface RawOutputOptions {
@@ -1282,7 +1281,7 @@ export interface RawResolveTsconfigOptions {
}

export interface RawRspackFuture {
newTreeshaking: boolean

}

export interface RawRuleSetCondition {
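Taken together, the two hunks above leave the generated binding.d.ts with builtins dropped from RawOptions and an empty RawRspackFuture. Roughly (fields not shown in the hunks, and the helper types they reference, are elided):

```ts
export interface RawOptions {
  profile: boolean;
  bail: boolean;
  // builtins: RawBuiltins   <- removed by this commit
}

export interface RawRspackFuture {
  // newTreeshaking: boolean <- removed; the future-flags object is now empty
}
```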
10 changes: 2 additions & 8 deletions crates/rspack_binding_options/src/options/mod.rs
@@ -1,7 +1,7 @@
use napi_derive::napi;
use rspack_core::{
CacheOptions, CompilerOptions, Context, Experiments, IncrementalRebuild,
IncrementalRebuildMakeState, ModuleOptions, OutputOptions, Target, TreeShaking,
IncrementalRebuildMakeState, ModuleOptions, OutputOptions, Target,
};

mod raw_builtins;
@@ -58,7 +58,6 @@ pub struct RawOptions {
pub node: Option<RawNodeOption>,
pub profile: bool,
pub bail: bool,
pub builtins: RawBuiltins,
}

impl TryFrom<RawOptions> for CompilerOptions {
@@ -90,11 +89,6 @@ impl TryFrom<RawOptions> for CompilerOptions {
let snapshot = value.snapshot.into();
let node = value.node.map(|n| n.into());

let mut builtins = value.builtins.apply()?;
if experiments.rspack_future.new_treeshaking {
builtins.tree_shaking = TreeShaking::False;
}

Ok(CompilerOptions {
context,
mode,
@@ -112,7 +106,7 @@ impl TryFrom<RawOptions> for CompilerOptions {
dev_server: Default::default(),
profile: value.profile,
bail: value.bail,
builtins,
builtins: Default::default(),
})
}
}
@@ -148,7 +148,6 @@ impl RawBuiltins {
Ok(Builtins {
define: Default::default(),
provide: Default::default(),
tree_shaking: self.tree_shaking.into(),
})
}
}
11 changes: 4 additions & 7 deletions crates/rspack_binding_options/src/options/raw_experiments.rs
@@ -1,11 +1,10 @@
use napi_derive::napi;
use rspack_core::RspackFuture;

#[allow(clippy::empty_structs_with_brackets)]
#[derive(Debug, Default)]
#[napi(object)]
pub struct RawRspackFuture {
pub new_treeshaking: bool,
}
pub struct RawRspackFuture {}

#[derive(Debug, Default)]
#[napi(object)]
@@ -15,9 +14,7 @@ pub struct RawExperiments {
}

impl From<RawRspackFuture> for RspackFuture {
fn from(value: RawRspackFuture) -> Self {
Self {
new_treeshaking: value.new_treeshaking,
}
fn from(_value: RawRspackFuture) -> Self {
Self {}
}
}
10 changes: 5 additions & 5 deletions crates/rspack_core/src/build_chunk_graph/code_splitter.rs
@@ -1382,11 +1382,11 @@ Or do you want to use the entrypoints '{name}' and '{runtime}' independently on
let modules = map
.get_mut(&block_id)
.expect("should have modules in block_modules_runtime_map");
let active_state = if self.compilation.options.is_new_tree_shaking() {
get_active_state_of_connections(&connections, runtime, &self.compilation.get_module_graph())
} else {
ConnectionState::Bool(true)
};
let active_state = get_active_state_of_connections(
&connections,
runtime,
&self.compilation.get_module_graph(),
);
modules.push((module_identifier, active_state, connections));
}
}
43 changes: 8 additions & 35 deletions crates/rspack_core/src/compiler/compilation.rs
@@ -10,7 +10,7 @@ use dashmap::{DashMap, DashSet};
use indexmap::{IndexMap, IndexSet};
use itertools::Itertools;
use rayon::prelude::*;
use rspack_error::{error, Diagnostic, Result, Severity, TWithDiagnosticArray};
use rspack_error::{error, Diagnostic, Result, Severity};
use rspack_futures::FuturesResults;
use rspack_hash::{RspackHash, RspackHashDigest};
use rspack_hook::define_hook;
@@ -25,12 +25,13 @@ use super::{
make::{make_module_graph, update_module_graph, MakeArtifact, MakeParam},
module_executor::ModuleExecutor,
};
use crate::ExecuteModuleId;
use crate::{
build_chunk_graph::build_chunk_graph,
cache::{use_code_splitting_cache, Cache, CodeSplittingCache},
get_chunk_from_ukey, get_mut_chunk_from_ukey, is_source_equal, prepare_get_exports_type,
to_identifier,
tree_shaking::{optimizer, visitor::SymbolRef, BailoutFlag, OptimizeDependencyResult},
tree_shaking::visitor::SymbolRef,
BoxDependency, BoxModule, CacheCount, CacheOptions, Chunk, ChunkByUkey, ChunkContentHash,
ChunkGraph, ChunkGroupByUkey, ChunkGroupUkey, ChunkKind, ChunkUkey, CodeGenerationResults,
CompilationLogger, CompilationLogging, CompilerOptions, DependencyId, DependencyType, Entry,
@@ -39,7 +40,6 @@ use crate::{
ResolverFactory, RuntimeGlobals, RuntimeModule, RuntimeSpec, SharedPluginDriver, SourceType,
Stats,
};
use crate::{tree_shaking::visitor::OptimizeAnalyzeResult, ExecuteModuleId};

pub type BuildDependency = (
DependencyId,
@@ -159,8 +159,6 @@ pub struct Compilation {
pub(crate) named_chunk_groups: HashMap<String, ChunkGroupUkey>,
/// Collecting all used export symbol
pub used_symbol_ref: HashSet<SymbolRef>,
/// Collecting all module that need to skip in tree-shaking ast modification phase
pub bailout_module_identifiers: IdentifierMap<BailoutFlag>,

pub code_generation_results: CodeGenerationResults,
pub code_generated_modules: IdentifierSet,
@@ -170,7 +168,6 @@ pub struct Compilation {
// lazy compilation visit module
pub lazy_visit_modules: std::collections::HashSet<String>,
pub used_chunk_ids: HashSet<String>,
pub include_module_ids: IdentifierSet,

pub file_dependencies: IndexSet<PathBuf, BuildHasherDefault<FxHasher>>,
pub context_dependencies: IndexSet<PathBuf, BuildHasherDefault<FxHasher>>,
@@ -247,7 +244,6 @@ impl Compilation {
named_chunks: Default::default(),
named_chunk_groups: Default::default(),
used_symbol_ref: HashSet::default(),
bailout_module_identifiers: IdentifierMap::default(),

code_generation_results: Default::default(),
code_generated_modules: Default::default(),
@@ -263,7 +259,6 @@ impl Compilation {
build_dependencies: Default::default(),
side_effects_free_modules: IdentifierSet::default(),
module_item_map: IdentifierMap::default(),
include_module_ids: IdentifierSet::default(),

import_var_map: DashMap::new(),

@@ -696,12 +691,10 @@ impl Compilation {
)
.await?;

if self.options.is_new_tree_shaking() {
let logger = self.get_logger("rspack.Compilation");
let start = logger.time("finish module");
self.finish(self.plugin_driver.clone()).await?;
logger.time_end(start);
}
let logger = self.get_logger("rspack.Compilation");
let start = logger.time("finish module");
self.finish(self.plugin_driver.clone()).await?;
logger.time_end(start);

let module_graph = self.get_module_graph();
Ok(f(module_identifiers
@@ -725,8 +718,7 @@
) -> Result<()> {
// If the runtime optimization is not opt out, a module codegen should be executed for each runtime.
// Else, share same codegen result for all runtimes.
let used_exports_optimization = compilation.options.is_new_tree_shaking()
&& compilation.options.optimization.used_exports.is_true();
let used_exports_optimization = compilation.options.optimization.used_exports.is_true();
let results = compilation.code_generation_modules(
codegen_cache_counter,
used_exports_optimization,
@@ -950,16 +942,6 @@ impl Compilation {
Ok(())
}

pub async fn optimize_dependency(
&mut self,
) -> Result<TWithDiagnosticArray<OptimizeDependencyResult>> {
let logger = self.get_logger("rspack.Compilation");
let start = logger.time("optimize dependencies");
let result = optimizer::CodeSizeOptimizer::new(self).run().await;
logger.time_end(start);
result
}

pub fn entry_modules(&self) -> IdentifierSet {
self.make_artifact.entry_module_identifiers.clone()
}
@@ -1633,15 +1615,6 @@ impl Compilation {
pub fn has_module_import_export_change(&self) -> bool {
self.make_artifact.has_module_graph_change
}

// TODO remove it after remove old treeshaking
pub fn optimize_analyze_result_map(&self) -> &IdentifierMap<OptimizeAnalyzeResult> {
&self.make_artifact.optimize_analyze_result_map
}
// TODO remove it after remove old treeshaking
pub fn optimize_analyze_result_map_mut(&mut self) -> &mut IdentifierMap<OptimizeAnalyzeResult> {
&mut self.make_artifact.optimize_analyze_result_map
}
}

pub type CompilationAssets = HashMap<String, CompilationAsset>;
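One behavioral consequence of the codegen hunk above (@@ -725,8 +718,7 @@): whether code generation runs once per runtime now depends only on optimization.used_exports, since the is_new_tree_shaking() guard is gone. On the JavaScript side that corresponds roughly to the webpack-style usedExports option (name assumed):

```ts
// Sketch: per-runtime code generation is keyed off usedExports alone after this change.
export default {
  optimization: {
    // true  -> codegen may run once per runtime, so unused exports can differ per runtime
    // false -> a single codegen result is shared across all runtimes
    usedExports: true,
  },
};
```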
58 changes: 2 additions & 56 deletions crates/rspack_core/src/compiler/make/mod.rs
@@ -4,16 +4,13 @@ pub mod repair;

use std::path::PathBuf;

use rayon::prelude::*;
use rspack_error::{Diagnostic, Result};
use rspack_identifier::{IdentifierMap, IdentifierSet};
use rspack_identifier::IdentifierSet;
use rustc_hash::FxHashSet as HashSet;

use self::{cutout::Cutout, file_counter::FileCounter, repair::repair};
use crate::{
tree_shaking::{visitor::OptimizeAnalyzeResult, BailoutFlag},
BuildDependency, Compilation, DependencyId, DependencyType, ModuleGraph, ModuleGraphPartial,
ModuleIdentifier,
BuildDependency, Compilation, DependencyId, ModuleGraph, ModuleGraphPartial, ModuleIdentifier,
};

#[derive(Debug, Default)]
@@ -27,7 +24,6 @@ pub struct MakeArtifact {
pub module_graph_partial: ModuleGraphPartial,
entry_dependencies: HashSet<DependencyId>,
pub entry_module_identifiers: IdentifierSet,
pub optimize_analyze_result_map: IdentifierMap<OptimizeAnalyzeResult>,
pub file_dependencies: FileCounter,
pub context_dependencies: FileCounter,
pub missing_dependencies: FileCounter,
@@ -124,11 +120,6 @@ pub fn make_module_graph(

artifact = update_module_graph_with_artifact(compilation, artifact, params)?;

if compilation.options.builtins.tree_shaking.enable() {
let module_graph = artifact.get_module_graph();
compilation.bailout_module_identifiers = calc_bailout_module_identifiers(&module_graph);
}

compilation.push_batch_diagnostic(std::mem::take(&mut artifact.diagnostics));
Ok(artifact)
}
@@ -142,11 +133,6 @@ pub async fn update_module_graph(

artifact = update_module_graph_with_artifact(compilation, artifact, params)?;

if compilation.options.builtins.tree_shaking.enable() {
let module_graph = artifact.get_module_graph();
compilation.bailout_module_identifiers = calc_bailout_module_identifiers(&module_graph);
}

compilation.push_batch_diagnostic(std::mem::take(&mut artifact.diagnostics));
compilation.swap_make_artifact(&mut artifact);
Ok(())
@@ -163,43 +149,3 @@ pub fn update_module_graph_with_artifact(
cutout.fix_artifact(&mut artifact);
Ok(artifact)
}

// TODO remove after remove old_treeshaking
fn calc_bailout_module_identifiers(module_graph: &ModuleGraph) -> IdentifierMap<BailoutFlag> {
// Avoid to introduce too much overhead,
// until we find a better way to align with webpack hmr behavior

// add context module and context element module to bailout_module_identifiers
module_graph
.dependencies()
.values()
.par_bridge()
.filter_map(|dep| {
if dep.as_context_dependency().is_some()
&& let Some(module) = module_graph.get_module_by_dependency_id(dep.id())
{
let mut values = vec![(module.identifier(), BailoutFlag::CONTEXT_MODULE)];
if let Some(dependencies) = module_graph.get_module_all_dependencies(&module.identifier()) {
for dependency in dependencies {
if let Some(dependency_module) =
module_graph.module_identifier_by_dependency_id(dependency)
{
values.push((*dependency_module, BailoutFlag::CONTEXT_MODULE));
}
}
}

Some(values)
} else if matches!(
dep.dependency_type(),
DependencyType::ContainerExposed | DependencyType::ProvideModuleForShared
) && let Some(module) = module_graph.get_module_by_dependency_id(dep.id())
{
Some(vec![(module.identifier(), BailoutFlag::CONTAINER_EXPOSED)])
} else {
None
}
})
.flatten()
.collect()
}
13 changes: 1 addition & 12 deletions crates/rspack_core/src/compiler/make/repair/add.rs
@@ -27,7 +27,6 @@ impl Task<MakeTaskContext> for AddTask {
}

let module_identifier = self.module.identifier();
let is_new_treeshaking = context.compiler_options.is_new_tree_shaking();
let module_graph =
&mut MakeTaskContext::get_module_graph_mut(&mut context.module_graph_partial);

@@ -43,7 +42,6 @@
self.original_module_identifier,
self.dependencies,
*issuer,
is_new_treeshaking,
)?;

// reused module
@@ -59,7 +57,6 @@
self.original_module_identifier,
self.dependencies,
module_identifier,
is_new_treeshaking,
)?;

// reused module
@@ -73,7 +70,6 @@
self.original_module_identifier,
self.dependencies,
module_identifier,
is_new_treeshaking,
)?;

if self.is_entry {
@@ -102,16 +98,9 @@ fn set_resolved_module(
original_module_identifier: Option<ModuleIdentifier>,
dependencies: Vec<DependencyId>,
module_identifier: ModuleIdentifier,
// TODO: removed when new treeshaking is stable
is_new_treeshaking: bool,
) -> Result<()> {
for dependency in dependencies {
module_graph.set_resolved_module(
original_module_identifier,
dependency,
module_identifier,
is_new_treeshaking,
)?;
module_graph.set_resolved_module(original_module_identifier, dependency, module_identifier)?;
}
Ok(())
}
5 changes: 0 additions & 5 deletions crates/rspack_core/src/compiler/make/repair/build.rs
@@ -116,11 +116,6 @@ impl Task<MakeTaskContext> for BuildResultTask {

let module_graph =
&mut MakeTaskContext::get_module_graph_mut(&mut context.module_graph_partial);
if context.compiler_options.builtins.tree_shaking.enable() {
context
.optimize_analyze_result_map
.insert(module.identifier(), build_result.analyze_result);
}

if !diagnostics.is_empty() {
context.make_failed_module.insert(module.identifier());

3 comments on commit 25edcdc

@rspack-bot


📝 Ran ecosystem CI: Open

suite result
modernjs ❌ failure
_selftest ✅ success
nx ✅ success
rspress ✅ success
rsbuild ✅ success
compat ✅ success
examples ✅ success

@rspack-bot


📝 Benchmark detail: Open

| Name | Base (2024-05-22 c7db90a) | Current | Change |
|------|---------------------------|---------|--------|
| 10000_development-mode + exec | 2.66 s ± 36 ms | 2.61 s ± 21 ms | -2.22 % |
| 10000_development-mode_hmr + exec | 740 ms ± 16 ms | 736 ms ± 11 ms | -0.56 % |
| 10000_production-mode + exec | 2.57 s ± 28 ms | 2.57 s ± 39 ms | -0.23 % |
| arco-pro_development-mode + exec | 2.54 s ± 37 ms | 2.37 s ± 85 ms | -6.71 % |
| arco-pro_development-mode_hmr + exec | 438 ms ± 3.2 ms | 441 ms ± 1.2 ms | +0.63 % |
| arco-pro_development-mode_hmr_intercept-plugin + exec | 447 ms ± 2.1 ms | 451 ms ± 1.6 ms | +0.82 % |
| arco-pro_development-mode_intercept-plugin + exec | 3.37 s ± 73 ms | 3.19 s ± 50 ms | -5.19 % |
| arco-pro_production-mode + exec | 4.1 s ± 66 ms | 3.95 s ± 75 ms | -3.50 % |
| arco-pro_production-mode_intercept-plugin + exec | 4.95 s ± 75 ms | 4.78 s ± 65 ms | -3.29 % |
| threejs_development-mode_10x + exec | 2 s ± 17 ms | 1.97 s ± 19 ms | -1.93 % |
| threejs_development-mode_10x_hmr + exec | 785 ms ± 7.1 ms | 782 ms ± 8.3 ms | -0.34 % |
| threejs_production-mode_10x + exec | 5.33 s ± 36 ms | 5.29 s ± 35 ms | -0.62 % |

@rspack-bot


📝 Ran ecosystem CI: Open

suite result
modernjs ✅ success
_selftest ✅ success
nx ✅ success
rspress ✅ success
rsbuild ✅ success
compat ✅ success
examples ✅ success
