atomic_refcell/lib.rs

//! Implements a container type providing RefCell-like semantics for objects
//! shared across threads.
//!
//! RwLock is traditionally considered to be the `Sync` analogue of RefCell.
//! However, for consumers that can guarantee that they will never mutably
//! borrow the contents concurrently with immutable borrows, an RwLock is
//! overkill, and has key disadvantages:
//! * Performance: Even the fastest existing implementation of RwLock (that of
//!   parking_lot) performs at least two atomic operations during immutable
//!   borrows. This makes mutable borrows significantly cheaper than immutable
//!   borrows, leading to weird incentives when writing performance-critical
//!   code.
//! * Features: Implementing AtomicRefCell on top of RwLock makes it impossible
//!   to implement useful things like AtomicRef{,Mut}::map.
//!
//! As such, we re-implement RefCell semantics from scratch with a single atomic
//! reference count. The primary complication of this scheme relates to keeping
//! things in a consistent state when one thread performs an illegal borrow and
//! panics. Since an AtomicRefCell can be accessed by multiple threads, and since
//! panics are recoverable, we need to ensure that an illegal (panicking) access by
//! one thread does not lead to undefined behavior on other, still-running threads.
//!
//! So we represent things as follows:
//! * Any value with the high bit set (so half the total refcount space) indicates
//!   a mutable borrow.
//! * Mutable borrows perform an atomic compare-and-swap, swapping in the high bit
//!   if the current value is zero. If the current value is non-zero, the thread
//!   panics and the value is left undisturbed.
//! * Immutable borrows perform an atomic increment. If the new value has the high
//!   bit set, the thread panics. The incremented refcount is left as-is, since it
//!   still represents a valid mutable borrow. When the mutable borrow is released,
//!   the refcount is set unconditionally to zero, clearing any stray increments by
//!   panicked threads.
//!
//! There are a few additional purely-academic complications to handle overflow,
//! which are documented in the implementation.
//!
//! The rest of this module is mostly derived by copy-pasting the implementation of
//! RefCell and fixing things up as appropriate. Certain non-threadsafe methods
//! have been removed. We segment the concurrency logic from the rest of the code to
//! keep the tricky parts small and easy to audit.
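//!
//! # Example
//!
//! A minimal usage sketch (assuming the crate is built under the name
//! `atomic_refcell`): shared borrows may coexist, and a mutable borrow
//! requires that no other borrow is live.
//!
//! ```
//! use atomic_refcell::AtomicRefCell;
//!
//! let cell = AtomicRefCell::new(5);
//! {
//!     let a = cell.borrow();
//!     let b = cell.borrow(); // multiple shared borrows are fine
//!     assert_eq!(*a + *b, 10);
//! }
//! *cell.borrow_mut() += 1; // exclusive borrow once the shared ones are dropped
//! assert_eq!(*cell.borrow(), 6);
//! ```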

#![no_std]
#![allow(unsafe_code)]
#![deny(missing_docs)]

use core::cell::UnsafeCell;
use core::cmp;
use core::fmt;
use core::fmt::{Debug, Display};
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::sync::atomic;
use core::sync::atomic::AtomicUsize;

#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

/// A threadsafe analogue to RefCell.
pub struct AtomicRefCell<T: ?Sized> {
    borrow: AtomicUsize,
    value: UnsafeCell<T>,
}

/// An error returned by [`AtomicRefCell::try_borrow`](struct.AtomicRefCell.html#method.try_borrow).
pub struct BorrowError {
    _private: (),
}

impl Debug for BorrowError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BorrowError").finish()
    }
}

impl Display for BorrowError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt("already mutably borrowed", f)
    }
}

/// An error returned by [`AtomicRefCell::try_borrow_mut`](struct.AtomicRefCell.html#method.try_borrow_mut).
pub struct BorrowMutError {
    _private: (),
}

impl Debug for BorrowMutError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BorrowMutError").finish()
    }
}

impl Display for BorrowMutError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt("already borrowed", f)
    }
}

impl<T> AtomicRefCell<T> {
    /// Creates a new `AtomicRefCell` containing `value`.
    #[inline]
    pub const fn new(value: T) -> AtomicRefCell<T> {
        AtomicRefCell {
            borrow: AtomicUsize::new(0),
            value: UnsafeCell::new(value),
        }
    }

    /// Consumes the `AtomicRefCell`, returning the wrapped value.
    #[inline]
    pub fn into_inner(self) -> T {
        debug_assert!(self.borrow.load(atomic::Ordering::Acquire) == 0);
        self.value.into_inner()
    }
}

impl<T: ?Sized> AtomicRefCell<T> {
    /// Immutably borrows the wrapped value.
    #[inline]
    pub fn borrow(&self) -> AtomicRef<T> {
        match AtomicBorrowRef::try_new(&self.borrow) {
            Ok(borrow) => AtomicRef {
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
            },
            Err(s) => panic!("{}", s),
        }
    }

    /// Attempts to immutably borrow the wrapped value, but instead of panicking
    /// on a failed borrow, returns `Err`.
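    ///
    /// # Example
    ///
    /// An illustrative sketch: a live mutable borrow makes `try_borrow` fail,
    /// and it succeeds again once the guard is dropped.
    ///
    /// ```
    /// # use atomic_refcell::AtomicRefCell;
    /// let cell = AtomicRefCell::new(1);
    /// let guard = cell.borrow_mut();
    /// assert!(cell.try_borrow().is_err());
    /// drop(guard);
    /// assert!(cell.try_borrow().is_ok());
    /// ```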
    #[inline]
    pub fn try_borrow(&self) -> Result<AtomicRef<T>, BorrowError> {
        match AtomicBorrowRef::try_new(&self.borrow) {
            Ok(borrow) => Ok(AtomicRef {
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
            }),
            Err(_) => Err(BorrowError { _private: () }),
        }
    }

    /// Mutably borrows the wrapped value.
    #[inline]
    pub fn borrow_mut(&self) -> AtomicRefMut<T> {
        match AtomicBorrowRefMut::try_new(&self.borrow) {
            Ok(borrow) => AtomicRefMut {
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
                marker: PhantomData,
            },
            Err(s) => panic!("{}", s),
        }
    }

    /// Attempts to mutably borrow the wrapped value, but instead of panicking
    /// on a failed borrow, returns `Err`.
    #[inline]
    pub fn try_borrow_mut(&self) -> Result<AtomicRefMut<T>, BorrowMutError> {
        match AtomicBorrowRefMut::try_new(&self.borrow) {
            Ok(borrow) => Ok(AtomicRefMut {
                value: unsafe { NonNull::new_unchecked(self.value.get()) },
                borrow,
                marker: PhantomData,
            }),
            Err(_) => Err(BorrowMutError { _private: () }),
        }
    }

    /// Returns a raw pointer to the underlying data in this cell.
    ///
    /// External synchronization is needed to avoid data races when dereferencing
    /// the pointer.
    #[inline]
    pub fn as_ptr(&self) -> *mut T {
        self.value.get()
    }

    /// Returns a mutable reference to the wrapped value.
    ///
    /// No runtime checks take place (unless debug assertions are enabled)
    /// because this call borrows `AtomicRefCell` mutably at compile-time.
    #[inline]
    pub fn get_mut(&mut self) -> &mut T {
        debug_assert!(self.borrow.load(atomic::Ordering::Acquire) == 0);
        unsafe { &mut *self.value.get() }
    }
}

//
// Core synchronization logic. Keep this section small and easy to audit.
//

const HIGH_BIT: usize = !(::core::usize::MAX >> 1);
const MAX_FAILED_BORROWS: usize = HIGH_BIT + (HIGH_BIT >> 1);
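// For illustration, on a 64-bit target HIGH_BIT is 0x8000_0000_0000_0000 and
// MAX_FAILED_BORROWS is 0xC000_0000_0000_0000, i.e. the abort threshold sits
// halfway between HIGH_BIT and usize::MAX. The same construction works for
// any pointer width.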

struct AtomicBorrowRef<'b> {
    borrow: &'b AtomicUsize,
}

impl<'b> AtomicBorrowRef<'b> {
    #[inline]
    fn try_new(borrow: &'b AtomicUsize) -> Result<Self, &'static str> {
        let new = borrow.fetch_add(1, atomic::Ordering::Acquire) + 1;
        if new & HIGH_BIT != 0 {
            // If the new count has the high bit set, that almost certainly
            // means there's a pre-existing mutable borrow. In that case,
            // we simply leave the increment as a benign side-effect and
            // return `Err`. Once the mutable borrow is released, the
            // count will be reset to zero unconditionally.
            //
            // The overflow check here ensures that an unbounded number of
            // immutable borrows during the scope of one mutable borrow
            // will soundly trigger a panic (or abort) rather than UB.
            Self::check_overflow(borrow, new);
            Err("already mutably borrowed")
        } else {
            Ok(AtomicBorrowRef { borrow })
        }
    }

    #[cold]
    #[inline(never)]
    fn check_overflow(borrow: &'b AtomicUsize, new: usize) {
        if new == HIGH_BIT {
            // We overflowed into the reserved upper half of the refcount
            // space. Before panicking, decrement the refcount to leave things
            // in a consistent immutable-borrow state.
            //
            // This can basically only happen if somebody forget()s AtomicRefs
            // in a tight loop.
            borrow.fetch_sub(1, atomic::Ordering::Release);
            panic!("too many immutable borrows");
        } else if new >= MAX_FAILED_BORROWS {
            // During the mutable borrow, an absurd number of threads have
            // attempted to increment the refcount with immutable borrows.
            // To avoid hypothetically wrapping the refcount, we abort the
            // process once a certain threshold is reached.
            //
            // This requires billions of borrows to fail during the scope of
            // one mutable borrow, and so is very unlikely to happen in a real
            // program.
            //
            // To avoid a potential unsound state after overflowing, we make
            // sure the entire process aborts.
            //
            // Right now, there's no stable way to do that without `std`:
            // https://github.com/rust-lang/rust/issues/67952
            // As a workaround, we cause an abort by making this thread panic
            // during the unwinding of another panic.
            //
            // On platforms where the panic strategy is already 'abort', the
            // ForceAbort object here has no effect, as the program already
            // aborts before it is dropped.
            struct ForceAbort;
            impl Drop for ForceAbort {
                fn drop(&mut self) {
                    panic!("Aborting to avoid unsound state of AtomicRefCell");
                }
            }
            let _abort = ForceAbort;
            panic!("Too many failed borrows");
        }
    }
}

impl<'b> Drop for AtomicBorrowRef<'b> {
    #[inline]
    fn drop(&mut self) {
        let old = self.borrow.fetch_sub(1, atomic::Ordering::Release);
        // This assertion is technically incorrect in the case where another
        // thread hits the hypothetical overflow case, since we might observe
        // the refcount before it fixes it up (and panics). But that will
        // never happen in a real program, and this is a debug_assert! anyway.
        debug_assert!(old & HIGH_BIT == 0);
    }
}

struct AtomicBorrowRefMut<'b> {
    borrow: &'b AtomicUsize,
}

impl<'b> Drop for AtomicBorrowRefMut<'b> {
    #[inline]
    fn drop(&mut self) {
        self.borrow.store(0, atomic::Ordering::Release);
    }
}

impl<'b> AtomicBorrowRefMut<'b> {
    #[inline]
    fn try_new(borrow: &'b AtomicUsize) -> Result<AtomicBorrowRefMut<'b>, &'static str> {
        // Use compare-and-swap to avoid corrupting the immutable borrow count
        // on illegal mutable borrows.
        let old = match borrow.compare_exchange(
            0,
            HIGH_BIT,
            atomic::Ordering::Acquire,
            atomic::Ordering::Relaxed,
        ) {
            Ok(x) => x,
            Err(x) => x,
        };

        if old == 0 {
            Ok(AtomicBorrowRefMut { borrow })
        } else if old & HIGH_BIT == 0 {
            Err("already immutably borrowed")
        } else {
            Err("already mutably borrowed")
        }
    }
}

unsafe impl<T: ?Sized + Send> Send for AtomicRefCell<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for AtomicRefCell<T> {}

//
// End of core synchronization logic. No tricky thread stuff allowed below
// this point.
//

impl<T: Clone> Clone for AtomicRefCell<T> {
    #[inline]
    fn clone(&self) -> AtomicRefCell<T> {
        AtomicRefCell::new(self.borrow().clone())
    }
}

impl<T: Default> Default for AtomicRefCell<T> {
    #[inline]
    fn default() -> AtomicRefCell<T> {
        AtomicRefCell::new(Default::default())
    }
}

impl<T: ?Sized + PartialEq> PartialEq for AtomicRefCell<T> {
    #[inline]
    fn eq(&self, other: &AtomicRefCell<T>) -> bool {
        *self.borrow() == *other.borrow()
    }
}

impl<T: ?Sized + Eq> Eq for AtomicRefCell<T> {}

impl<T: ?Sized + PartialOrd> PartialOrd for AtomicRefCell<T> {
    #[inline]
    fn partial_cmp(&self, other: &AtomicRefCell<T>) -> Option<cmp::Ordering> {
        self.borrow().partial_cmp(&*other.borrow())
    }
}

impl<T: ?Sized + Ord> Ord for AtomicRefCell<T> {
    #[inline]
    fn cmp(&self, other: &AtomicRefCell<T>) -> cmp::Ordering {
        self.borrow().cmp(&*other.borrow())
    }
}

impl<T> From<T> for AtomicRefCell<T> {
    fn from(t: T) -> AtomicRefCell<T> {
        AtomicRefCell::new(t)
    }
}

impl<'b> Clone for AtomicBorrowRef<'b> {
    #[inline]
    fn clone(&self) -> AtomicBorrowRef<'b> {
        AtomicBorrowRef::try_new(self.borrow).unwrap()
    }
}

/// A wrapper type for an immutably borrowed value from an `AtomicRefCell<T>`.
pub struct AtomicRef<'b, T: ?Sized + 'b> {
    value: NonNull<T>,
    borrow: AtomicBorrowRef<'b>,
}

// SAFETY: `AtomicRef<'_, T>` acts as a reference. `AtomicBorrowRef` is a
// reference to an atomic.
unsafe impl<'b, T: ?Sized> Sync for AtomicRef<'b, T> where for<'a> &'a T: Sync {}
unsafe impl<'b, T: ?Sized> Send for AtomicRef<'b, T> where for<'a> &'a T: Send {}

impl<'b, T: ?Sized> Deref for AtomicRef<'b, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: We hold a shared borrow of the value.
        unsafe { self.value.as_ref() }
    }
}

impl<'b, T: ?Sized> AtomicRef<'b, T> {
    /// Copies an `AtomicRef`.
    #[inline]
    pub fn clone(orig: &AtomicRef<'b, T>) -> AtomicRef<'b, T> {
        AtomicRef {
            value: orig.value,
            borrow: orig.borrow.clone(),
        }
    }

    /// Make a new `AtomicRef` for a component of the borrowed data.
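    ///
    /// # Example
    ///
    /// An illustrative sketch: narrowing a borrow of a tuple down to one field.
    ///
    /// ```
    /// # use atomic_refcell::{AtomicRefCell, AtomicRef};
    /// let cell = AtomicRefCell::new((5, 'b'));
    /// let first = AtomicRef::map(cell.borrow(), |t| &t.0);
    /// assert_eq!(*first, 5);
    /// ```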
    #[inline]
    pub fn map<U: ?Sized, F>(orig: AtomicRef<'b, T>, f: F) -> AtomicRef<'b, U>
    where
        F: FnOnce(&T) -> &U,
    {
        AtomicRef {
            value: NonNull::from(f(&*orig)),
            borrow: orig.borrow,
        }
    }

    /// Make a new `AtomicRef` for an optional component of the borrowed data.
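    ///
    /// # Example
    ///
    /// An illustrative sketch: the borrow is returned only when the closure
    /// finds the component.
    ///
    /// ```
    /// # use atomic_refcell::{AtomicRefCell, AtomicRef};
    /// let cell = AtomicRefCell::new(Some(5));
    /// let inner = AtomicRef::filter_map(cell.borrow(), |o| o.as_ref());
    /// assert_eq!(inner.as_deref(), Some(&5));
    /// ```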
    #[inline]
    pub fn filter_map<U: ?Sized, F>(orig: AtomicRef<'b, T>, f: F) -> Option<AtomicRef<'b, U>>
    where
        F: FnOnce(&T) -> Option<&U>,
    {
        Some(AtomicRef {
            value: NonNull::from(f(&*orig)?),
            borrow: orig.borrow,
        })
    }
}

impl<'b, T: ?Sized> AtomicRefMut<'b, T> {
    /// Make a new `AtomicRefMut` for a component of the borrowed data, e.g. an enum
    /// variant.
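    ///
    /// # Example
    ///
    /// An illustrative sketch: mutating a single field through a narrowed borrow.
    ///
    /// ```
    /// # use atomic_refcell::{AtomicRefCell, AtomicRefMut};
    /// let cell = AtomicRefCell::new((5, 'b'));
    /// {
    ///     let mut first = AtomicRefMut::map(cell.borrow_mut(), |t| &mut t.0);
    ///     *first = 42;
    /// }
    /// assert_eq!(cell.borrow().0, 42);
    /// ```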
    #[inline]
    pub fn map<U: ?Sized, F>(mut orig: AtomicRefMut<'b, T>, f: F) -> AtomicRefMut<'b, U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        AtomicRefMut {
            value: NonNull::from(f(&mut *orig)),
            borrow: orig.borrow,
            marker: PhantomData,
        }
    }

    /// Make a new `AtomicRefMut` for an optional component of the borrowed data.
    #[inline]
    pub fn filter_map<U: ?Sized, F>(
        mut orig: AtomicRefMut<'b, T>,
        f: F,
    ) -> Option<AtomicRefMut<'b, U>>
    where
        F: FnOnce(&mut T) -> Option<&mut U>,
    {
        Some(AtomicRefMut {
            value: NonNull::from(f(&mut *orig)?),
            borrow: orig.borrow,
            marker: PhantomData,
        })
    }
}

/// A wrapper type for a mutably borrowed value from an `AtomicRefCell<T>`.
pub struct AtomicRefMut<'b, T: ?Sized + 'b> {
    value: NonNull<T>,
    borrow: AtomicBorrowRefMut<'b>,
    // `NonNull` is covariant over `T`, but this is used in place of a mutable
    // reference so we need to be invariant over `T`.
    marker: PhantomData<&'b mut T>,
}

// SAFETY: `AtomicRefMut<'_, T>` acts as a mutable reference.
// `AtomicBorrowRefMut` is a reference to an atomic.
unsafe impl<'b, T: ?Sized> Sync for AtomicRefMut<'b, T> where for<'a> &'a mut T: Sync {}
unsafe impl<'b, T: ?Sized> Send for AtomicRefMut<'b, T> where for<'a> &'a mut T: Send {}

impl<'b, T: ?Sized> Deref for AtomicRefMut<'b, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: We hold an exclusive borrow of the value.
        unsafe { self.value.as_ref() }
    }
}

impl<'b, T: ?Sized> DerefMut for AtomicRefMut<'b, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: We hold an exclusive borrow of the value.
        unsafe { self.value.as_mut() }
    }
}

impl<'b, T: ?Sized + Debug + 'b> Debug for AtomicRef<'b, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <T as Debug>::fmt(self, f)
    }
}

impl<'b, T: ?Sized + Debug + 'b> Debug for AtomicRefMut<'b, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <T as Debug>::fmt(self, f)
    }
}

impl<T: ?Sized + Debug> Debug for AtomicRefCell<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.try_borrow() {
            Ok(borrow) => f.debug_struct("AtomicRefCell").field("value", &borrow).finish(),
            Err(_) => {
                // The AtomicRefCell is mutably borrowed so we can't look at its value
                // here. Show a placeholder instead.
                struct BorrowedPlaceholder;

                impl Debug for BorrowedPlaceholder {
                    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                        f.write_str("<borrowed>")
                    }
                }

                f.debug_struct("AtomicRefCell").field("value", &BorrowedPlaceholder).finish()
            }
        }
    }
}

#[cfg(feature = "serde")]
impl<'de, T: Deserialize<'de>> Deserialize<'de> for AtomicRefCell<T> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        T::deserialize(deserializer).map(Self::from)
    }
}

#[cfg(feature = "serde")]
impl<T: Serialize> Serialize for AtomicRefCell<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::Error;
        match self.try_borrow() {
            Ok(value) => value.serialize(serializer),
            Err(_err) => Err(S::Error::custom("already mutably borrowed")),
        }
    }
}