Merge #51
51: unify implementations in pl and std r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] and matklad committed Sep 1, 2019
2 parents 0f35ac4 + f118d54 commit ac69dd6
Showing 3 changed files with 91 additions and 124 deletions.
92 changes: 28 additions & 64 deletions src/imp_pl.rs
@@ -1,7 +1,5 @@
 use std::{
     cell::UnsafeCell,
-    fmt,
-    hint::unreachable_unchecked,
     panic::{RefUnwindSafe, UnwindSafe},
     sync::atomic::{AtomicBool, Ordering},
 };
@@ -11,7 +9,7 @@ use parking_lot::{lock_api::RawMutex as _RawMutex, RawMutex};
 pub(crate) struct OnceCell<T> {
     mutex: Mutex,
     is_initialized: AtomicBool,
-    value: UnsafeCell<Option<T>>,
+    pub(crate) value: UnsafeCell<Option<T>>,
 }

 // Why do we need `T: Send`?
@@ -25,12 +23,6 @@ unsafe impl<T: Send> Send for OnceCell<T> {}
 impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceCell<T> {}
 impl<T: UnwindSafe> UnwindSafe for OnceCell<T> {}

-impl<T: fmt::Debug> fmt::Debug for OnceCell<T> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("OnceCell").field("value", &self.get()).finish()
-    }
-}
-
 impl<T> OnceCell<T> {
     pub(crate) const fn new() -> OnceCell<T> {
         OnceCell {
@@ -40,64 +32,36 @@ impl<T> OnceCell<T> {
         }
     }

-    pub(crate) fn get(&self) -> Option<&T> {
-        if self.is_initialized.load(Ordering::Acquire) {
-            // This is safe: if we've read `true` with `Acquire`, that means
-            // we've are paired with `Release` store, which sets the value.
-            // Additionally, no one invalidates value after `is_initialized` is
-            // set to `true`
-            let value: &Option<T> = unsafe { &*self.value.get() };
-            value.as_ref()
-        } else {
-            None
-        }
+    /// Safety: synchronizes with store to value via Release/Acquire.
+    #[inline]
+    pub(crate) fn is_initialized(&self) -> bool {
+        self.is_initialized.load(Ordering::Acquire)
     }

-    pub(crate) fn get_or_try_init<F: FnOnce() -> Result<T, E>, E>(&self, f: F) -> Result<&T, E> {
-        // Standard double-checked locking pattern.
-
-        // Optimistically check if value is initialized, without locking a
-        // mutex.
-        if !self.is_initialized.load(Ordering::Acquire) {
-            let _guard = self.mutex.lock();
-            // Relaxed is OK, because mutex unlock/lock establishes "happens
-            // before".
-            if !self.is_initialized.load(Ordering::Relaxed) {
-                // We are calling user-supplied function and need to be careful.
-                // - if it returns Err, we unlock mutex and return without touching anything
-                // - if it panics, we unlock mutex and propagate panic without touching anything
-                // - if it calls `set` or `get_or_try_init` re-entrantly, we get a deadlock on
-                //   mutex, which is important for safety. We *could* detect this and panic,
-                //   but that is more complicated
-                // - finally, if it returns Ok, we store the value and store the flag with
-                //   `Release`, which synchronizes with `Acquire`s.
-                let value = f()?;
-                let slot: &mut Option<T> = unsafe { &mut *self.value.get() };
-                debug_assert!(slot.is_none());
-                *slot = Some(value);
-                self.is_initialized.store(true, Ordering::Release);
-            }
+    /// Safety: synchronizes with store to value via `is_initialized` or mutex
+    /// lock/unlock, writes value only once because of the mutex.
+    #[cold]
+    pub(crate) fn initialize<F, E>(&self, f: F) -> Result<(), E>
+    where
+        F: FnOnce() -> Result<T, E>,
+    {
+        let _guard = self.mutex.lock();
+        if !self.is_initialized() {
+            // We are calling user-supplied function and need to be careful.
+            // - if it returns Err, we unlock mutex and return without touching anything
+            // - if it panics, we unlock mutex and propagate panic without touching anything
+            // - if it calls `set` or `get_or_try_init` re-entrantly, we get a deadlock on
+            //   mutex, which is important for safety. We *could* detect this and panic,
+            //   but that is more complicated
+            // - finally, if it returns Ok, we store the value and store the flag with
+            //   `Release`, which synchronizes with `Acquire`s.
+            let value = f()?;
+            let slot: &mut Option<T> = unsafe { &mut *self.value.get() };
+            debug_assert!(slot.is_none());
+            *slot = Some(value);
+            self.is_initialized.store(true, Ordering::Release);
        }
-
-        // Value is initialized here, because we've read `true` from
-        // `is_initialized`, and have a "happens before" due to either
-        // Acquire/Release pair (fast path) or mutex unlock (slow path).
-        // While we could have just called `get`, that would be twice
-        // as slow!
-        let value: &Option<T> = unsafe { &*self.value.get() };
-        return match value.as_ref() {
-            Some(it) => Ok(it),
-            None => {
-                debug_assert!(false);
-                unsafe { unreachable_unchecked() }
-            }
-        };
-    }
-
-    pub(crate) fn into_inner(self) -> Option<T> {
-        // Because `into_inner` takes `self` by value, the compiler statically verifies
-        // that it is not currently borrowed. So it is safe to move out `Option<T>`.
-        self.value.into_inner()
+        Ok(())
     }
 }

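The parking_lot backend thus shrinks to two primitives: an `Acquire`-loaded flag for the fast path and a mutex-guarded `initialize` for the slow path, with `get_or_try_init` reassembled on top of them in `lib.rs`. Below is a minimal sketch of the same protocol, using `std::sync::Mutex` in place of parking_lot's `RawMutex` purely to keep the example self-contained (note that std's mutex poisons on panic, a difference from the real backend):

```rust
use std::{
    cell::UnsafeCell,
    sync::{
        atomic::{AtomicBool, Ordering},
        Mutex,
    },
};

pub struct OnceCell<T> {
    mutex: Mutex<()>,             // guards the slow path
    is_initialized: AtomicBool,   // fast-path flag
    value: UnsafeCell<Option<T>>, // written at most once, under the mutex
}

// Same reasoning as in the diff: the cell hands out &T across threads.
unsafe impl<T: Send> Send for OnceCell<T> {}
unsafe impl<T: Send + Sync> Sync for OnceCell<T> {}

impl<T> OnceCell<T> {
    pub fn new() -> OnceCell<T> {
        OnceCell {
            mutex: Mutex::new(()),
            is_initialized: AtomicBool::new(false),
            value: UnsafeCell::new(None),
        }
    }

    // Fast path: an Acquire load that pairs with the Release store below.
    pub fn is_initialized(&self) -> bool {
        self.is_initialized.load(Ordering::Acquire)
    }

    // Slow path: take the lock, re-check, write the value at most once.
    pub fn initialize<F, E>(&self, f: F) -> Result<(), E>
    where
        F: FnOnce() -> Result<T, E>,
    {
        let _guard = self.mutex.lock().unwrap();
        if !self.is_initialized() {
            let value = f()?;
            unsafe { *self.value.get() = Some(value) };
            self.is_initialized.store(true, Ordering::Release);
        }
        Ok(())
    }

    // Front end composed from the two primitives, as `lib.rs` now does.
    pub fn get_or_try_init<F, E>(&self, f: F) -> Result<&T, E>
    where
        F: FnOnce() -> Result<T, E>,
    {
        if !self.is_initialized() {
            self.initialize(f)?;
        }
        // The Acquire/Release pair (or the mutex) makes the write visible.
        Ok(unsafe { (*self.value.get()).as_ref().unwrap() })
    }
}
```

The key invariant is that `value` is written only while the mutex is held and before the `Release` store, so any thread that observes `true` with `Acquire` also observes the completed write.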
68 changes: 22 additions & 46 deletions src/imp_std.rs
@@ -22,7 +22,7 @@ pub(crate) struct OnceCell<T> {
     // that far. It was stabilized in 1.36.0, so, if you are reading this and
     // it's higher than 1.46.0 outside, please send a PR! ;) (and to the same
     // for `Lazy`, while we are at it).
-    value: UnsafeCell<Option<T>>,
+    pub(crate) value: UnsafeCell<Option<T>>,
 }

 // Why do we need `T: Send`?
@@ -69,72 +69,46 @@ impl<T> OnceCell<T> {
         }
     }

-    pub(crate) fn into_inner(self) -> Option<T> {
-        // Because `into_inner` takes `self` by value, the compiler statically verifies
-        // that it is not currently borrowed. So it is safe to move out `Option<T>`.
-        self.value.into_inner()
-    }
-
-    pub(crate) fn get(&self) -> Option<&T> {
-        if self.is_completed() {
-            let slot: &Option<T> = unsafe { &*self.value.get() };
-            match slot {
-                Some(value) => Some(value),
-                // This unsafe does improve performance, see `examples/bench`.
-                None => unsafe { std::hint::unreachable_unchecked() },
-            }
-        } else {
-            None
-        }
+    /// Safety: synchronizes with store to value via Release/(Acquire|SeqCst).
+    #[inline]
+    pub(crate) fn is_initialized(&self) -> bool {
+        // An `Acquire` load is enough because that makes all the initialization
+        // operations visible to us, and, this being a fast path, weaker
+        // ordering helps with performance. This `Acquire` synchronizes with
+        // `SeqCst` operations on the slow path.
+        self.state.load(Ordering::Acquire) == COMPLETE
     }

-    pub fn get_or_try_init<F, E>(&self, f: F) -> Result<&T, E>
+    /// Safety: synchronizes with store to value via SeqCst read from state,
+    /// writes value only once because we never get to INCOMPLETE state after a
+    /// successful write.
+    #[cold]
+    pub(crate) fn initialize<F, E>(&self, f: F) -> Result<(), E>
     where
         F: FnOnce() -> Result<T, E>,
     {
-        // Fast path check
-        if let Some(value) = self.get() {
-            return Ok(value);
-        }
-
         let mut f = Some(f);
-        let mut err: Option<E> = None;
+        let mut res: Result<(), E> = Ok(());
         let slot = &self.value;
-        get_or_try_init_inner(&self.state, &mut || {
+        initialize_inner(&self.state, &mut || {
             let f = f.take().unwrap();
             match f() {
                 Ok(value) => {
                     unsafe { *slot.get() = Some(value) };
                     true
                 }
                 Err(e) => {
-                    err = Some(e);
+                    res = Err(e);
                     false
                 }
             }
         });
-        match err {
-            Some(err) => Err(err),
-            None => {
-                let value: &T = unsafe { &*slot.get() }.as_ref().unwrap();
-                Ok(value)
-            }
-        }
-    }
-
-    #[inline]
-    fn is_completed(&self) -> bool {
-        // An `Acquire` load is enough because that makes all the initialization
-        // operations visible to us, and, this being a fast path, weaker
-        // ordering helps with performance. This `Acquire` synchronizes with
-        // `SeqCst` operations on the slow path.
-        self.state.load(Ordering::Acquire) == COMPLETE
+        res
     }
 }

 // Note: this is intentionally monomorphic
 #[cold]
-fn get_or_try_init_inner(my_state: &AtomicUsize, init: &mut dyn FnMut() -> bool) -> bool {
+fn initialize_inner(my_state: &AtomicUsize, init: &mut dyn FnMut() -> bool) -> bool {
     // This cold path uses SeqCst consistently because the
     // performance difference really does not matter there, and
     // SeqCst minimizes the chances of something going wrong.
@@ -163,6 +137,7 @@ fn get_or_try_init_inner(my_state: &AtomicUsize, init: &mut dyn FnMut() -> bool)
                 // case.
                 let mut complete = Finish { failed: true, my_state };
                 let success = init();
+                // Difference from std: abort if `init` errored.
                 complete.failed = !success;
                 return success;
             }
@@ -209,6 +184,7 @@ impl Drop for Finish<'_> {
         // Swap out our state with however we finished. We should only ever see
         // an old state which was RUNNING.
         let queue = if self.failed {
+            // Difference from std: flip back to INCOMPLETE rather than POISONED.
             self.my_state.swap(INCOMPLETE, Ordering::SeqCst)
         } else {
             self.my_state.swap(COMPLETE, Ordering::SeqCst)
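The `Finish` guard is what makes this recovery work: it is created with `failed: true` before the user closure runs, so an early `return` or an unwinding panic both roll the state back. A compressed sketch of the guard pattern follows (the constants are illustrative; the real state word in `imp_std.rs` also packs a queue of parked waiter threads, omitted here):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Illustrative tri-state values; the real implementation also encodes
// a pointer to the waiter queue in the same word.
const INCOMPLETE: usize = 0;
const RUNNING: usize = 1;
const COMPLETE: usize = 2;

struct Finish<'a> {
    failed: bool,
    my_state: &'a AtomicUsize,
}

impl Drop for Finish<'_> {
    fn drop(&mut self) {
        // Runs on success, on error, *and* on panic: either publish COMPLETE
        // or roll back to INCOMPLETE so another thread may retry.
        let new_state = if self.failed { INCOMPLETE } else { COMPLETE };
        self.my_state.swap(new_state, Ordering::SeqCst);
    }
}

fn main() {
    let state = AtomicUsize::new(RUNNING);
    {
        // Pessimistically assume failure before running the init closure.
        let mut guard = Finish { failed: true, my_state: &state };
        let success = true; // stand-in for `init()`
        guard.failed = !success;
    } // guard drops here and publishes the outcome
    assert_eq!(state.load(Ordering::SeqCst), COMPLETE);
}
```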
@@ -244,7 +220,7 @@ mod tests {
     impl<T> OnceCell<T> {
         fn init(&self, f: impl FnOnce() -> T) {
             enum Void {}
-            let _ = self.get_or_try_init(|| Ok::<T, Void>(f()));
+            let _ = self.initialize(|| Ok::<T, Void>(f()));
         }
     }

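Together with the `Ok(())`-returning `initialize`, that rollback gives `once_cell` its retry-after-failure semantics, observable through the public API. A small usage example (assuming the crate's `sync::OnceCell` with the `std` feature, as in this revision):

```rust
use once_cell::sync::OnceCell;

fn main() {
    let cell: OnceCell<u32> = OnceCell::new();

    // First attempt fails; the cell flips back to INCOMPLETE, not POISONED.
    let first: Result<&u32, &str> = cell.get_or_try_init(|| Err("boom"));
    assert!(first.is_err());
    assert!(cell.get().is_none());

    // A second attempt can still succeed, because initialization is retryable.
    let second = cell.get_or_try_init(|| Ok::<u32, &str>(92));
    assert_eq!(second, Ok(&92));
}
```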
55 changes: 41 additions & 14 deletions src/lib.rs
@@ -496,11 +496,9 @@ pub mod unsync {
         /// assert_eq!(&*lazy, &92);
         /// ```
         pub fn force(this: &Lazy<T, F>) -> &T {
-            this.cell.get_or_init(|| {
-                match this.init.take() {
-                    Some(f) => f(),
-                    None => panic!("Lazy instance has previously been poisoned"),
-                }
+            this.cell.get_or_init(|| match this.init.take() {
+                Some(f) => f(),
+                None => panic!("Lazy instance has previously been poisoned"),
             })
         }
     }
@@ -515,7 +513,7 @@ pub mod unsync {

 #[cfg(feature = "std")]
 pub mod sync {
-    use std::{cell::Cell, fmt, panic::RefUnwindSafe};
+    use std::{cell::Cell, fmt, hint::unreachable_unchecked, panic::RefUnwindSafe};

     use crate::imp::OnceCell as Imp;

@@ -599,7 +597,28 @@ pub mod sync {
         /// Returns `None` if the cell is empty, or being initialized. This
         /// method never blocks.
         pub fn get(&self) -> Option<&T> {
-            self.0.get()
+            if self.0.is_initialized() {
+                // Safe b/c checked is_initialize
+                Some(unsafe { self.get_unchecked() })
+            } else {
+                None
+            }
+        }
+
+        /// Safety to call if guarded by `initialize`, `is_initialized`
+        ///
+        /// Implementations of those functions in `imp` must provide proper
+        /// synchronization and write-once property
+        unsafe fn get_unchecked(&self) -> &T {
+            let slot: &Option<T> = &*self.0.value.get();
+            match slot {
+                Some(value) => value,
+                // This unsafe does improve performance, see `examples/bench`.
+                None => {
+                    debug_assert!(false);
+                    unreachable_unchecked()
+                }
+            }
         }

         /// Sets the contents of this cell to `value`.
@@ -700,7 +719,15 @@ pub mod sync {
         where
             F: FnOnce() -> Result<T, E>,
         {
-            self.0.get_or_try_init(f)
+            // Fast path check
+            if let Some(value) = self.get() {
+                return Ok(value);
+            }
+            self.0.initialize(f)?;
+
+            // Safe b/c called initialize
+            debug_assert!(self.0.is_initialized());
+            Ok(unsafe { self.get_unchecked() })
         }

         /// Consumes the `OnceCell`, returning the wrapped value. Returns
@@ -719,7 +746,9 @@ pub mod sync {
         /// assert_eq!(cell.into_inner(), Some("hello".to_string()));
         /// ```
         pub fn into_inner(self) -> Option<T> {
-            self.0.into_inner()
+            // Because `into_inner` takes `self` by value, the compiler statically verifies
+            // that it is not currently borrowed. So it is safe to move out `Option<T>`.
+            self.0.value.into_inner()
         }
     }

@@ -800,11 +829,9 @@ pub mod sync {
         /// assert_eq!(&*lazy, &92);
         /// ```
         pub fn force(this: &Lazy<T, F>) -> &T {
-            this.cell.get_or_init(|| {
-                match this.init.take() {
-                    Some(f) => f(),
-                    None => panic!("Lazy instance has previously been poisoned"),
-                }
+            this.cell.get_or_init(|| match this.init.take() {
+                Some(f) => f(),
+                None => panic!("Lazy instance has previously been poisoned"),
             })
         }
     }

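The `Lazy::force` hunks in both modules are pure reformatting, but they surface a contract worth noting: the init closure is `take`n before it runs, so if it panics, a later `force` hits the `None` arm and panics with "Lazy instance has previously been poisoned". Typical successful use, mirroring the doc-test in the diff:

```rust
use once_cell::sync::Lazy;

fn main() {
    let lazy: Lazy<i32> = Lazy::new(|| 92);
    // The first deref goes through `force` -> `get_or_init` -> `initialize`;
    // every later deref takes the `is_initialized` fast path.
    assert_eq!(*lazy, 92);
    assert_eq!(&*lazy, &92);
}
```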