change to NoHashHasher for ID keys (#3864)
sokra committed Feb 17, 2023
1 parent 6a15aa3 commit a313e24
Showing 11 changed files with 75 additions and 57 deletions.
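The change applied across these files is the same in each place: wherever an `AutoMap`/`AutoSet` is keyed by an ID newtype (`TaskId`, `TraitTypeId`), the hasher type parameter is named explicitly as `nohash_hasher::BuildNoHashHasher<...>`, so the integer ID is used as its own hash value instead of being run through the previous general-purpose hasher. Below is a minimal sketch of the pattern using std collections; `TaskId` here is a hypothetical stand-in for the repo's newtype, which likewise has to opt in via nohash_hasher's `IsEnabled` marker trait:

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

use nohash_hasher::{BuildNoHashHasher, IsEnabled};

// Hypothetical stand-in for the repo's TaskId newtype.
#[derive(Clone, Copy, PartialEq, Eq)]
struct TaskId(u32);

// The Hash impl must feed exactly one integer write to the hasher...
impl Hash for TaskId {
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write_u32(self.0);
    }
}

// ...and the type must opt in to the pass-through hasher.
impl IsEnabled for TaskId {}

fn main() {
    // The hash of TaskId(42) is simply 42; no hashing work per insert or lookup.
    let mut dependent_tasks: HashSet<TaskId, BuildNoHashHasher<TaskId>> = HashSet::default();
    dependent_tasks.insert(TaskId(42));
    assert!(dependent_tasks.contains(&TaskId(42)));
}
```

Since the IDs are unique, process-local integers, identity hashing is presumably safe here and removes per-operation hashing from these hot dependency sets.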
2 changes: 1 addition & 1 deletion crates/auto-hash-map/src/map.rs
@@ -53,7 +53,7 @@ impl<K, V> AutoMap<K, V, RandomState> {

impl<K, V, H: BuildHasher> AutoMap<K, V, H> {
/// see [HashMap::with_hasher](https://doc.rust-lang.org/std/collections/hash_map/struct.HashMap.html#method.with_hasher)
- pub fn with_hasher() -> Self {
+ pub const fn with_hasher() -> Self {
AutoMap::List(Vec::new())
}

2 changes: 1 addition & 1 deletion crates/auto-hash-map/src/set.rs
@@ -46,7 +46,7 @@ impl<K> AutoSet<K, RandomState> {

impl<K, H: BuildHasher> AutoSet<K, H> {
/// see [HashSet::with_hasher](https://doc.rust-lang.org/std/collections/hash_set/struct.HashSet.html#method.with_hasher)
- pub fn with_hasher() -> Self {
+ pub const fn with_hasher() -> Self {
Self {
map: AutoMap::with_hasher(),
}
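The `with_hasher` constructors above also become `const fn` in this commit; cell.rs below relies on that to build a shared `static EMPTY` set at compile time instead of allocating an empty set on every call. A standalone sketch of why `const` matters for that pattern (`TinySet` is a made-up stand-in, not the crate's `AutoSet`):

```rust
// Made-up stand-in type; only illustrates the const-constructor + static pattern.
struct TinySet<T> {
    items: Vec<T>,
}

impl<T> TinySet<T> {
    // `Vec::new` is a const fn, so this constructor can be const too,
    // which allows it to be evaluated in a `static` initializer.
    pub const fn new() -> Self {
        TinySet { items: Vec::new() }
    }
}

// Shared, never-mutated empty set; the empty case can hand out `&EMPTY`
// instead of constructing a fresh set per call.
static EMPTY: TinySet<u32> = TinySet::new();

fn main() {
    assert!(EMPTY.items.is_empty());
}
```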
21 changes: 12 additions & 9 deletions crates/turbo-tasks-memory/src/cell.rs
@@ -4,6 +4,7 @@ use std::{
};

use auto_hash_map::AutoSet;
+ use nohash_hasher::BuildNoHashHasher;
use turbo_tasks::{
backend::CellContent,
event::{Event, EventListener},
@@ -22,18 +23,20 @@ pub(crate) enum Cell {
/// tracking is still active. Any update will invalidate dependent tasks.
/// Assigning a value will transition to the Value state.
/// Reading this cell will transition to the Recomputing state.
- TrackedValueless { dependent_tasks: AutoSet<TaskId> },
+ TrackedValueless {
+     dependent_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
+ },
/// Someone wanted to read the content and it was not available. The content
/// is now being recomputed.
/// Assigning a value will transition to the Value state.
Recomputing {
- dependent_tasks: AutoSet<TaskId>,
+ dependent_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
event: Event,
},
/// The content was set only once and is tracked.
/// GC operation will transition to the TrackedValueless state.
Value {
- dependent_tasks: AutoSet<TaskId>,
+ dependent_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
content: CellContent,
},
}
@@ -90,10 +93,10 @@ impl Cell {
}

/// Returns the list of dependent tasks.
- pub fn dependent_tasks(&self) -> &AutoSet<TaskId> {
+ pub fn dependent_tasks(&self) -> &AutoSet<TaskId, BuildNoHashHasher<TaskId>> {
match self {
Cell::Empty => {
- static EMPTY: AutoSet<TaskId> = AutoSet::new();
+ static EMPTY: AutoSet<TaskId, BuildNoHashHasher<TaskId>> = AutoSet::with_hasher();
&EMPTY
}
Cell::Value {
@@ -111,7 +114,7 @@
/// Switch the cell to recomputing state.
fn recompute(
&mut self,
- dependent_tasks: AutoSet<TaskId>,
+ dependent_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
description: impl Fn() -> String + Sync + Send + 'static,
note: impl Fn() -> String + Sync + Send + 'static,
) -> EventListener {
@@ -159,7 +162,7 @@ impl Cell {
) -> Result<CellContent, RecomputingCell> {
match self {
Cell::Empty => {
- let listener = self.recompute(AutoSet::new(), description, note);
+ let listener = self.recompute(AutoSet::default(), description, note);
Err(RecomputingCell {
listener,
schedule: true,
@@ -206,7 +209,7 @@ impl Cell {
Cell::Empty => {
*self = Cell::Value {
content,
- dependent_tasks: AutoSet::new(),
+ dependent_tasks: AutoSet::default(),
};
}
&mut Cell::Recomputing {
@@ -229,7 +232,7 @@
}
*self = Cell::Value {
content,
- dependent_tasks: AutoSet::new(),
+ dependent_tasks: AutoSet::default(),
};
}
Cell::Value {
7 changes: 4 additions & 3 deletions crates/turbo-tasks-memory/src/memory_backend.rs
@@ -16,6 +16,7 @@
use anyhow::{bail, Result};
use auto_hash_map::AutoSet;
use dashmap::{mapref::entry::Entry, DashMap};
+ use nohash_hasher::BuildNoHashHasher;
use rustc_hash::FxHasher;
use tokio::task::futures::TaskLocalFuture;
use turbo_tasks::{
@@ -685,9 +686,9 @@ impl Backend for MemoryBackend {
}

pub(crate) enum Job {
- RemoveFromScopes(AutoSet<TaskId>, Vec<TaskScopeId>),
- RemoveFromScope(AutoSet<TaskId>, TaskScopeId),
- ScheduleWhenDirtyFromScope(AutoSet<TaskId>),
+ RemoveFromScopes(AutoSet<TaskId, BuildNoHashHasher<TaskId>>, Vec<TaskScopeId>),
+ RemoveFromScope(AutoSet<TaskId, BuildNoHashHasher<TaskId>>, TaskScopeId),
+ ScheduleWhenDirtyFromScope(AutoSet<TaskId, BuildNoHashHasher<TaskId>>),
/// Add tasks from a scope. Scheduled by `run_add_from_scope_queue` to
/// split off work.
AddToScopeQueue {
15 changes: 8 additions & 7 deletions crates/turbo-tasks-memory/src/memory_backend_with_pg.rs
@@ -16,6 +16,7 @@ use anyhow::{anyhow, Result};
use auto_hash_map::AutoSet;
use concurrent_queue::ConcurrentQueue;
use dashmap::{mapref::entry::Entry, DashMap, DashSet};
+ use nohash_hasher::BuildNoHashHasher;
use turbo_tasks::{
backend::{
Backend, BackendJobId, CellContent, PersistentTaskType, TaskExecutionSpec,
@@ -70,11 +71,11 @@ struct MemoryTaskState {
need_persist: bool,
has_changes: bool,
freshness: TaskFreshness,
- cells: HashMap<CellId, (TaskCell, AutoSet<TaskId>)>,
+ cells: HashMap<CellId, (TaskCell, AutoSet<TaskId, BuildNoHashHasher<TaskId>>)>,
output: Option<Result<RawVc, SharedError>>,
- output_dependent: AutoSet<TaskId>,
+ output_dependent: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
dependencies: AutoSet<RawVc>,
- children: AutoSet<TaskId>,
+ children: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
event: Event,
event_cells: Event,
}
@@ -85,7 +86,7 @@ impl MemoryTaskState {
freshness,
need_persist: Default::default(),
has_changes: Default::default(),
- cells: Default::default(),
+ cells: HashMap::default(),
output: Default::default(),
output_dependent: Default::default(),
dependencies: Default::default(),
@@ -272,10 +273,10 @@ impl<P: PersistedGraph> MemoryBackendWithPersistedGraph<P> {
cells: data
.cells
.into_iter()
- .map(|(k, s)| (k, (s, AutoSet::new())))
+ .map(|(k, s)| (k, (s, AutoSet::default())))
.collect(),
output: Some(Ok(data.output)),
- output_dependent: AutoSet::new(),
+ output_dependent: AutoSet::default(),
dependencies: data.dependencies.into_iter().collect(),
children: data.children.into_iter().collect(),
need_persist: Default::default(),
@@ -1096,7 +1097,7 @@ impl<P: PersistedGraph> Backend for MemoryBackendWithPersistedGraph<P> {
mem_state.output = Some(result.map_err(SharedError::new));
take(&mut mem_state.output_dependent)
} else {
- AutoSet::new()
+ AutoSet::default()
};

drop(state);
5 changes: 3 additions & 2 deletions crates/turbo-tasks-memory/src/output.rs
@@ -6,13 +6,14 @@ use std::{

use anyhow::{anyhow, Error, Result};
use auto_hash_map::AutoSet;
+ use nohash_hasher::BuildNoHashHasher;
use turbo_tasks::{util::SharedError, RawVc, TaskId, TurboTasksBackendApi};

#[derive(Default, Debug)]
pub struct Output {
pub(crate) content: OutputContent,
updates: u32,
- pub(crate) dependent_tasks: AutoSet<TaskId>,
+ pub(crate) dependent_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
}

#[derive(Clone, Debug)]
@@ -95,7 +96,7 @@ impl Output {
}
}

- pub fn dependent_tasks(&self) -> &AutoSet<TaskId> {
+ pub fn dependent_tasks(&self) -> &AutoSet<TaskId, BuildNoHashHasher<TaskId>> {
&self.dependent_tasks
}

39 changes: 23 additions & 16 deletions crates/turbo-tasks-memory/src/scope.rs
@@ -138,7 +138,7 @@ pub struct TaskScopeState {
active: isize,
/// When not active, this list contains all dirty tasks.
/// When the scope becomes active, these need to be scheduled.
- dirty_tasks: AutoSet<TaskId>,
+ dirty_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
/// All child scopes, when the scope becomes active, child scopes need to
/// become active too
children: CountHashSet<TaskScopeId, BuildNoHashHasher<TaskScopeId>>,
@@ -151,9 +151,16 @@
pub parents: CountHashSet<TaskScopeId, BuildNoHashHasher<TaskScopeId>>,
/// Tasks that have read children
/// When they change these tasks are invalidated
- dependent_tasks: AutoSet<TaskId>,
+ dependent_tasks: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
/// Emitted collectibles with count and dependent_tasks by trait type
- collectibles: AutoMap<TraitTypeId, (CountHashSet<RawVc>, AutoSet<TaskId>)>,
+ collectibles: AutoMap<
+     TraitTypeId,
+     (
+         CountHashSet<RawVc>,
+         AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
+     ),
+     BuildNoHashHasher<TraitTypeId>,
+ >,
}

impl TaskScope {
@@ -168,10 +175,10 @@
#[cfg(feature = "print_scope_updates")]
id,
active: 0,
- dirty_tasks: AutoSet::new(),
+ dirty_tasks: AutoSet::default(),
children: CountHashSet::new(),
- collectibles: AutoMap::new(),
- dependent_tasks: AutoSet::new(),
+ collectibles: AutoMap::default(),
+ dependent_tasks: AutoSet::default(),
event: Event::new(move || {
#[cfg(feature = "print_scope_updates")]
return format!("TaskScope({id})::event");
@@ -195,10 +202,10 @@
#[cfg(feature = "print_scope_updates")]
id,
active: 1,
- dirty_tasks: AutoSet::new(),
+ dirty_tasks: AutoSet::default(),
children: CountHashSet::new(),
- collectibles: AutoMap::new(),
- dependent_tasks: AutoSet::new(),
+ collectibles: AutoMap::default(),
+ dependent_tasks: AutoSet::default(),
event: Event::new(move || {
#[cfg(feature = "print_scope_updates")]
return format!("TaskScope({id})::event");
@@ -426,14 +433,14 @@
}

pub struct ScopeChildChangeEffect {
- pub notify: AutoSet<TaskId>,
+ pub notify: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
pub active: bool,
/// `true` when the child to parent relationship needs to be updated
pub parent: bool,
}

pub struct ScopeCollectibleChangeEffect {
- pub notify: AutoSet<TaskId>,
+ pub notify: AutoSet<TaskId, BuildNoHashHasher<TaskId>>,
}

impl TaskScopeState {
Expand All @@ -447,7 +454,7 @@ impl TaskScopeState {
pub fn increment_active(
&mut self,
more_jobs: &mut Vec<TaskScopeId>,
- ) -> Option<AutoSet<TaskId>> {
+ ) -> Option<AutoSet<TaskId, BuildNoHashHasher<TaskId>>> {
self.increment_active_by(1, more_jobs)
}
/// increments the active counter, returns list of tasks that need to be
@@ -458,7 +465,7 @@
&mut self,
count: usize,
more_jobs: &mut Vec<TaskScopeId>,
- ) -> Option<AutoSet<TaskId>> {
+ ) -> Option<AutoSet<TaskId, BuildNoHashHasher<TaskId>>> {
let was_zero = self.active <= 0;
self.active += count as isize;
if self.active > 0 && was_zero {
@@ -553,7 +560,7 @@

/// Takes all children or collectibles dependent tasks and returns them for
/// notification.
- pub fn take_all_dependent_tasks(&mut self) -> AutoSet<TaskId> {
+ pub fn take_all_dependent_tasks(&mut self) -> AutoSet<TaskId, BuildNoHashHasher<TaskId>> {
let mut set = self.take_dependent_tasks();
self.collectibles = take(&mut self.collectibles)
.into_iter()
@@ -614,7 +621,7 @@ impl TaskScopeState {
debug_assert!(result, "this must be always a new entry");
log_scope_update!("add_collectible {} -> {}", *self.id, collectible);
Some(ScopeCollectibleChangeEffect {
- notify: AutoSet::new(),
+ notify: AutoSet::default(),
})
}
}
@@ -677,7 +684,7 @@ impl TaskScopeState {
}
}

- pub fn take_dependent_tasks(&mut self) -> AutoSet<TaskId> {
+ pub fn take_dependent_tasks(&mut self) -> AutoSet<TaskId, BuildNoHashHasher<TaskId>> {
take(&mut self.dependent_tasks)
}
}
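In scope.rs the no-hash hasher is threaded through both levels of the collectibles map: the outer `AutoMap` is now keyed by `TraitTypeId` with `BuildNoHashHasher<TraitTypeId>`, and each entry still carries its own `dependent_tasks` set keyed by `TaskId`. A rough std-collections analog of that shape (the ID and `RawVc` types are plain integers here and `CountHashSet` is approximated by a count map, so this is only a sketch of the layout, not the crate's API):

```rust
use std::collections::{HashMap, HashSet};

use nohash_hasher::BuildNoHashHasher;

// Plain-integer stand-ins for the repo's newtypes.
type TraitTypeId = u32;
type TaskId = u32;
type RawVc = u64; // placeholder for a collectible value handle

// Outer map: trait type -> (emitted collectibles with counts, readers to notify).
type Collectibles = HashMap<
    TraitTypeId,
    (
        HashMap<RawVc, isize>,                      // stands in for CountHashSet<RawVc>
        HashSet<TaskId, BuildNoHashHasher<TaskId>>, // dependent_tasks
    ),
    BuildNoHashHasher<TraitTypeId>,
>;

fn main() {
    let mut collectibles: Collectibles = HashMap::default();
    let entry = collectibles.entry(3).or_default();
    *entry.0.entry(0xdead_beef).or_insert(0) += 1; // emit one collectible
    entry.1.insert(42); // task 42 has read this trait's collectibles
    assert_eq!(collectibles[&3].0[&0xdead_beef], 1);
}
```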
