// vasi_sync/sync.rs
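//! Synchronization primitives that can be checked under loom.
//!
//! This module wraps primitives such as atomics, cells, and futex operations
//! so that they dispatch to loom's instrumented versions when built with
//! `cfg(loom)`, and to the "real" `core`/`std`/OS versions otherwise.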
#[cfg(not(loom))]
pub use core::{
    sync::atomic,
    sync::atomic::{AtomicBool, AtomicI8, AtomicI32, AtomicU32, AtomicUsize, Ordering},
};
#[cfg(loom)]
use std::collections::HashMap;

#[cfg(not(loom))]
pub use core::cell::Cell;
#[cfg(loom)]
pub use loom::cell::Cell;

#[cfg(loom)]
use loom::sync::{Condvar, Mutex};
#[cfg(loom)]
pub use loom::{
    sync::Arc,
    sync::atomic,
    sync::atomic::{AtomicBool, AtomicI8, AtomicI32, AtomicU32, AtomicUsize, Ordering},
};
#[cfg(not(loom))]
use vasi::VirtualAddressSpaceIndependent;
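// Under loom, futex system calls are emulated with a global table mapping each
// futex word's address to a condition variable.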
#[cfg(loom)]
loom::lazy_static! {
    pub static ref FUTEXES: Mutex<HashMap<usize, Arc<Condvar>>> = Mutex::new(HashMap::new());
}

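// Yields the current thread; under loom this is a modeled yield point where
// the checker may switch to another thread.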
#[cfg(not(loom))]
pub fn sched_yield() {
    rustix::process::sched_yield();
}
#[cfg(loom)]
pub fn sched_yield() {
    loom::thread::yield_now();
}

enum FutexOperation {
    Wait,
    Wake,
}

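/// Thin wrapper around the Linux `futex` syscall, supporting only the
/// operations this module needs.
///
/// # Safety
///
/// Callers must pass a pointer to a valid futex word; see the futex(2) man
/// page for the full contract.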
#[cfg(not(loom))]
unsafe fn futex(
    futex_word: &AtomicU32,
    futex_operation: FutexOperation,
    val: u32,
) -> rustix::io::Result<usize> {
    #[cfg(not(miri))]
    {
        let futex_operation = match futex_operation {
            FutexOperation::Wait => rustix::thread::FutexOperation::Wait,
            FutexOperation::Wake => rustix::thread::FutexOperation::Wake,
        };

        unsafe {
            rustix::thread::futex(
                futex_word.as_ptr(),
                futex_operation,
                rustix::thread::FutexFlags::empty(),
                val,
                core::ptr::null(),
                core::ptr::null_mut(),
                0u32,
            )
        }
    }
    // Under miri, go through `libc::syscall`, which miri can intercept and
    // emulate, instead of making a raw syscall through rustix.
    #[cfg(miri)]
    {
        let futex_operation = match futex_operation {
            FutexOperation::Wait => libc::FUTEX_WAIT,
            FutexOperation::Wake => libc::FUTEX_WAKE,
        };
        let rv = unsafe {
            libc::syscall(
                libc::SYS_futex,
                futex_word.as_ptr(),
                futex_operation,
                val,
                core::ptr::null() as *const libc::timespec,
                core::ptr::null_mut() as *mut u32,
                0u32,
            )
        };
        if rv >= 0 {
            Ok(rv.try_into().unwrap())
        } else {
            Err(rustix::io::Errno::from_raw_os_error(unsafe {
                *libc::__errno_location()
            }))
        }
    }
}

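/// Analogous to the Linux `FUTEX_WAIT` operation: atomically checks that
/// `*futex_word == val` and, if so, blocks until woken. Returns
/// `Err(Errno::AGAIN)` if the value had already changed.
///
/// A minimal sketch of the usual caller pattern (`wait_until_nonzero` is a
/// hypothetical helper, not part of this module):
///
/// ```ignore
/// fn wait_until_nonzero(word: &AtomicU32) {
///     while word.load(Ordering::Acquire) == 0 {
///         // Sleeps only if `word` is still 0; a concurrent store+wake
///         // between the load and this call just returns Errno::AGAIN.
///         let _ = futex_wait(word, 0);
///     }
/// }
/// ```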
#[inline]
pub fn futex_wait(futex_word: &AtomicU32, val: u32) -> rustix::io::Result<usize> {
    #[cfg(not(loom))]
    {
        unsafe { futex(futex_word, FutexOperation::Wait, val) }
    }
    #[cfg(loom)]
    {
        let mut hashmap = FUTEXES.lock().unwrap();
        // Check the futex word while holding the lock: a waker must take the
        // same lock to notify, so a wakeup can't be lost between this check
        // and the `wait` below.
        let futex_word_val = futex_word.load(Ordering::Relaxed);
        if futex_word_val != val {
            return Err(rustix::io::Errno::AGAIN);
        }
        let condvar = hashmap
            .entry(std::ptr::from_ref(futex_word) as usize)
            .or_insert(Arc::new(Condvar::new()))
            .clone();
        condvar.wait(hashmap).unwrap();
        Ok(0)
    }
}

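/// Analogous to the Linux `FUTEX_WAKE` operation with `val == 1`: wakes at
/// most one thread blocked in [`futex_wait`] on `futex_word`.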
#[inline]
pub fn futex_wake_one(futex_word: &AtomicU32) -> rustix::io::Result<()> {
    #[cfg(not(loom))]
    {
        unsafe { futex(futex_word, FutexOperation::Wake, 1) }.map(|_| ())
    }
    #[cfg(loom)]
    {
        let hashmap = FUTEXES.lock().unwrap();
        let Some(condvar) = hashmap.get(&(std::ptr::from_ref(futex_word) as usize)) else {
            return Ok(());
        };
        condvar.notify_one();
        Ok(())
    }
}

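/// Analogous to the Linux `FUTEX_WAKE` operation with `val == INT_MAX`:
/// wakes every thread blocked in [`futex_wait`] on `futex_word`.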
#[inline]
pub fn futex_wake_all(futex_word: &AtomicU32) -> rustix::io::Result<()> {
    #[cfg(not(loom))]
    {
        unsafe {
            // Asking the kernel to wake `i32::MAX` waiters is the
            // conventional way to wake *all* waiters.
            futex(
                futex_word,
                FutexOperation::Wake,
                u32::try_from(i32::MAX).unwrap(),
            )
        }
        .map(|_| ())
    }
    #[cfg(loom)]
    {
        let hashmap = FUTEXES.lock().unwrap();
        let Some(condvar) = hashmap.get(&(std::ptr::from_ref(futex_word) as usize)) else {
            return Ok(());
        };
        condvar.notify_all();
        Ok(())
    }
}

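// Mirror of `loom::cell::MutPtr`'s API, so that callers can be written once
// and work both with and without loom.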
#[cfg(not(loom))]
pub struct MutPtr<T: ?Sized>(*mut T);
#[cfg(not(loom))]
impl<T: ?Sized> MutPtr<T> {
    /// # Safety
    ///
    /// The pointed-to object must still be live, and there must be no other
    /// live reference to it for as long as the returned reference exists.
    #[inline]
    #[allow(clippy::mut_from_ref)]
    pub unsafe fn deref(&self) -> &mut T {
        unsafe { &mut *self.0 }
    }

    #[inline]
    pub fn with<F, R>(&self, f: F) -> R
    where
        F: FnOnce(*mut T) -> R,
    {
        f(self.0)
    }
}
#[cfg(loom)]
pub struct MutPtr<T: ?Sized>(loom::cell::MutPtr<T>);
#[cfg(loom)]
impl<T: ?Sized> MutPtr<T> {
    /// # Safety
    ///
    /// See the non-loom version.
    #[inline]
    #[allow(clippy::mut_from_ref)]
    pub unsafe fn deref(&self) -> &mut T {
        unsafe { self.0.deref() }
    }

    #[inline]
    pub fn with<F, R>(&self, f: F) -> R
    where
        F: FnOnce(*mut T) -> R,
    {
        self.0.with(f)
    }
}

unsafe impl<T: ?Sized> Send for MutPtr<T> where T: Send {}

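// Mirror of `loom::cell::ConstPtr`'s API when loom is disabled; under loom,
// loom's own type is re-exported directly.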
#[cfg(not(loom))]
pub struct ConstPtr<T: ?Sized>(*const T);
#[cfg(not(loom))]
impl<T: ?Sized> ConstPtr<T> {
    /// # Safety
    ///
    /// The pointed-to object must still be live, and there must be no live
    /// mutable reference to it for as long as the returned reference exists.
    pub unsafe fn deref(&self) -> &T {
        unsafe { &*self.0 }
    }

    pub fn with<F, R>(&self, f: F) -> R
    where
        F: FnOnce(*const T) -> R,
    {
        f(self.0)
    }
}

#[cfg(loom)]
pub use loom::cell::ConstPtr;

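// Mirror of `loom::cell::UnsafeCell`'s API. The non-loom version is
// `repr(transparent)` over `core::cell::UnsafeCell` and derives
// `VirtualAddressSpaceIndependent`, keeping it usable across virtual
// address spaces.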
#[cfg(not(loom))]
#[derive(Debug, VirtualAddressSpaceIndependent)]
#[repr(transparent)]
pub struct UnsafeCell<T>(core::cell::UnsafeCell<T>);
#[cfg(not(loom))]
impl<T> UnsafeCell<T> {
    #[inline]
    pub const fn new(data: T) -> UnsafeCell<T> {
        UnsafeCell(core::cell::UnsafeCell::new(data))
    }

    #[inline]
    pub fn get_mut(&self) -> MutPtr<T> {
        MutPtr(self.0.get())
    }

    #[inline]
    pub fn get(&self) -> ConstPtr<T> {
        ConstPtr(self.0.get())
    }

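    /// Escape hatch: returns the raw pointer without going through
    /// `MutPtr`/`ConstPtr`. Accesses made through it are invisible to loom,
    /// which is why there is no loom equivalent.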
    pub fn untracked_get(&self) -> *mut T {
        self.0.get()
    }
}
#[cfg(loom)]
#[derive(Debug)]
pub struct UnsafeCell<T>(loom::cell::UnsafeCell<T>);
#[cfg(loom)]
impl<T> UnsafeCell<T> {
    pub fn new(data: T) -> UnsafeCell<T> {
        UnsafeCell(loom::cell::UnsafeCell::new(data))
    }

    pub fn get_mut(&self) -> MutPtr<T> {
        MutPtr(self.0.get_mut())
    }

    pub fn get(&self) -> ConstPtr<T> {
        self.0.get()
    }
}

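/// Clears the emulated futex table. Loom tests should call this at the start
/// of each model iteration so that no state leaks between runs.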
#[cfg(loom)]
pub fn loom_reset() {
    FUTEXES.lock().unwrap().clear();
}
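
// A minimal sketch of how these primitives can be driven under loom's model
// checker; this test module is illustrative, not part of this crate's suite.
#[cfg(all(test, loom))]
mod loom_usage_sketch {
    use super::*;

    #[test]
    fn wait_then_wake() {
        loom::model(|| {
            loom_reset();
            let word = Arc::new(AtomicU32::new(0));
            let waker = {
                let word = word.clone();
                loom::thread::spawn(move || {
                    word.store(1, Ordering::Release);
                    futex_wake_all(&word).unwrap();
                })
            };
            // Standard futex idiom: re-check the word around every wait.
            while word.load(Ordering::Acquire) == 0 {
                // Err(Errno::AGAIN) just means `word` changed before we slept.
                let _ = futex_wait(&word, 0);
            }
            waker.join().unwrap();
        });
    }
}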