crossbeam-epoch/src/lib.rs
Line | Count | Source |
1 | 1 | //! Epoch-based memory reclamation. |
2 | | //! |
3 | | //! An interesting problem concurrent collections deal with comes from the remove operation. |
4 | | //! Suppose that a thread removes an element from a lock-free map, while another thread is reading |
5 | | //! that same element at the same time. The first thread must wait until the second thread stops |
6 | | //! reading the element. Only then it is safe to destruct it. |
7 | | //! |
8 | | //! Programming languages that come with garbage collectors solve this problem trivially. The |
9 | | //! garbage collector will destruct the removed element when no thread can hold a reference to it |
10 | | //! anymore. |
11 | | //! |
12 | | //! This crate implements a basic memory reclamation mechanism, which is based on epochs. When an |
13 | | //! element gets removed from a concurrent collection, it is inserted into a pile of garbage and |
14 | | //! marked with the current epoch. Every time a thread accesses a collection, it checks the current |
15 | | //! epoch, attempts to increment it, and destructs some garbage that became so old that no thread |
16 | | //! can be referencing it anymore. |
17 | | //! |
18 | | //! That is the general mechanism behind epoch-based memory reclamation, but the details are a bit |
19 | | //! more complicated. Anyhow, memory reclamation is designed to be fully automatic and something |
20 | | //! users of concurrent collections don't have to worry much about. |
21 | | //! |
22 | | //! # Pointers |
23 | | //! |
24 | | //! Concurrent collections are built using atomic pointers. This module provides [`Atomic`], which |
25 | | //! is just a shared atomic pointer to a heap-allocated object. Loading an [`Atomic`] yields a |
26 | | //! [`Shared`], which is an epoch-protected pointer through which the loaded object can be safely |
27 | | //! read. |
28 | | //! |
29 | | //! # Pinning |
30 | | //! |
31 | | //! Before an [`Atomic`] can be loaded, a participant must be [`pin`]ned. By pinning a participant |
32 | | //! we declare that any object that gets removed from now on must not be destructed just |
33 | | //! yet. Garbage collection of newly removed objects is suspended until the participant gets |
34 | | //! unpinned. |
35 | | //! |
36 | | //! # Garbage |
37 | | //! |
38 | | //! Objects that get removed from concurrent collections must be stashed away until all currently |
39 | | //! pinned participants get unpinned. Such objects can be stored into a thread-local or global |
40 | | //! storage, where they are kept until the right time for their destruction comes. |
41 | | //! |
42 | | //! There is a global shared instance of garbage queue. You can [`defer`](Guard::defer) the execution of an |
43 | | //! arbitrary function until the global epoch is advanced enough. Most notably, concurrent data |
44 | | //! structures may defer the deallocation of an object. |
45 | | //! |
46 | | //! # APIs |
47 | | //! |
48 | | //! For majority of use cases, just use the default garbage collector by invoking [`pin`]. If you |
49 | | //! want to create your own garbage collector, use the [`Collector`] API. |
50 | | |
51 | | #![doc(test( |
52 | | no_crate_inject, |
53 | | attr( |
54 | | deny(warnings, rust_2018_idioms), |
55 | | allow(dead_code, unused_assignments, unused_variables) |
56 | | ) |
57 | | ))] |
58 | | #![warn( |
59 | | missing_docs, |
60 | | missing_debug_implementations, |
61 | | rust_2018_idioms, |
62 | | unreachable_pub |
63 | | )] |
64 | | #![cfg_attr(not(feature = "std"), no_std)] |
65 | | #![cfg_attr(feature = "nightly", feature(cfg_target_has_atomic))] |
66 | | #![cfg_attr(feature = "nightly", feature(const_fn))] |
67 | | // matches! requires Rust 1.42 |
68 | | #![allow(clippy::match_like_matches_macro)] |
69 | | |
70 | | use cfg_if::cfg_if; |
71 | | |
#[cfg(loom_crossbeam)]
#[allow(unused_imports, dead_code)]
mod primitive {
    pub(crate) mod cell {
        pub(crate) use loom::cell::UnsafeCell;
    }
    pub(crate) mod sync {
        pub(crate) mod atomic {
            use core::sync::atomic::Ordering;
            pub(crate) use loom::sync::atomic::AtomicUsize;

            /// Shim for `atomic::fence` when running under loom.
            ///
            /// loom only models acquire fences at the moment
            /// (https://github.com/tokio-rs/loom/issues/117), so instead of
            /// panicking on other orderings we map every ordering to an
            /// acquire fence. This can yield some false positives (`SeqCst`
            /// is stronger than `Acquire`) and some false negatives
            /// (`Relaxed` is weaker than `Acquire`), but it is the best
            /// approximation available for the time being.
            pub(crate) fn fence(_ord: Ordering) {
                loom::sync::atomic::fence(Ordering::Acquire)
            }

            // loom does not support `compiler_fence` either
            // (https://github.com/tokio-rs/loom/issues/117); reuse the full
            // fence as a stand-in. `fence` is stronger than `compiler_fence`,
            // so this may miss some races, but it never permits behavior the
            // real `compiler_fence` would forbid.
            pub(crate) use self::fence as compiler_fence;
        }
        pub(crate) use loom::sync::Arc;
    }
    pub(crate) use loom::lazy_static;
    pub(crate) use loom::thread_local;
}
#[cfg(not(loom_crossbeam))]
#[allow(unused_imports, dead_code)]
mod primitive {
    #[cfg(any(feature = "alloc", feature = "std"))]
    pub(crate) mod cell {
        /// A thin wrapper around the standard library's `UnsafeCell` that
        /// exposes the loom `UnsafeCell` API (`with`/`with_mut`).
        ///
        /// loom's `UnsafeCell` has a slightly different API than the standard
        /// library one. So that the rest of the crate can stay agnostic to
        /// whether it is compiled under loom or not, this wrapper provides
        /// the loom-flavored API on top of the standard library cell — the
        /// approach recommended by the loom documentation:
        /// https://github.com/tokio-rs/loom#handling-loom-api-differences
        #[derive(Debug)]
        #[repr(transparent)]
        pub(crate) struct UnsafeCell<T>(::core::cell::UnsafeCell<T>);

        impl<T> UnsafeCell<T> {
            /// Wraps `value` in a new `UnsafeCell`.
            #[inline]
            pub(crate) fn new(value: T) -> UnsafeCell<T> {
                UnsafeCell(::core::cell::UnsafeCell::new(value))
            }

            /// Runs `func` with a shared raw pointer to the wrapped value.
            #[inline]
            pub(crate) fn with<R>(&self, func: impl FnOnce(*const T) -> R) -> R {
                func(self.0.get())
            }

            /// Runs `func` with a mutable raw pointer to the wrapped value.
            #[inline]
            pub(crate) fn with_mut<R>(&self, func: impl FnOnce(*mut T) -> R) -> R {
                func(self.0.get())
            }
        }
    }
    #[cfg(any(feature = "alloc", feature = "std"))]
    pub(crate) mod sync {
        pub(crate) mod atomic {
            pub(crate) use core::sync::atomic::compiler_fence;
            pub(crate) use core::sync::atomic::fence;
            pub(crate) use core::sync::atomic::AtomicUsize;
        }
        #[cfg_attr(feature = "nightly", cfg(target_has_atomic = "ptr"))]
        pub(crate) use alloc::sync::Arc;
    }

    #[cfg(feature = "std")]
    pub(crate) use std::thread_local;

    #[cfg(feature = "std")]
    pub(crate) use lazy_static::lazy_static;
}
155 | | |
156 | | #[cfg_attr(feature = "nightly", cfg(target_has_atomic = "ptr"))] |
157 | | cfg_if! { |
158 | | if #[cfg(feature = "alloc")] { |
159 | | extern crate alloc; |
160 | | |
161 | | mod atomic; |
162 | | mod collector; |
163 | | mod deferred; |
164 | | mod epoch; |
165 | | mod guard; |
166 | | mod internal; |
167 | | mod sync; |
168 | | |
169 | | pub use self::atomic::{ |
170 | | Pointable, Atomic, CompareExchangeError, |
171 | | Owned, Pointer, Shared, |
172 | | }; |
173 | | pub use self::collector::{Collector, LocalHandle}; |
174 | | pub use self::guard::{unprotected, Guard}; |
175 | | |
176 | | #[allow(deprecated)] |
177 | | pub use self::atomic::{CompareAndSetError, CompareAndSetOrdering}; |
178 | | } |
179 | | } |
180 | | |
// The default (global) collector relies on thread-local storage, so it is
// only available with the `std` feature.
cfg_if! {
    if #[cfg(feature = "std")] {
        mod default;
        pub use self::default::{default_collector, is_pinned, pin};
    }
}