// heapless/pool/cas.rs
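//! Lock-free stack (Treiber stack) based on a 64-bit compare-and-swap.
//!
//! Instead of storing a full pointer in the `head` word, each entry is packed as a 32-bit
//! address/offset plus a 32-bit version tag. The tag is incremented on every successful
//! `try_pop`, which reduces the chance of hitting the ABA problem while keeping the whole
//! value inside a single `AtomicU64`.
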
use core::{
    cell::UnsafeCell,
    marker::PhantomData,
    num::{NonZeroU32, NonZeroU64},
    ptr::NonNull,
    sync::atomic::{AtomicU64, Ordering},
};

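/// Intrusive stack node: a `next` link plus storage for the user's value.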
pub struct Node<T> {
    next: Atomic<Node<T>>,
    pub(crate) data: UnsafeCell<T>,
}

impl<T> Node<T> {
    fn next(&self) -> &Atomic<Node<T>> {
        &self.next
    }
}

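/// Lock-free, intrusive stack of `Node<T>`s; `head` is a nullable tagged pointer.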
pub struct Stack<T> {
    head: Atomic<Node<T>>,
}

impl<T> Stack<T> {
    pub const fn new() -> Self {
        Self {
            head: Atomic::null(),
        }
    }

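    /// Pushes `new_head` onto the stack.
    ///
    /// Classic CAS loop: link the new node to the currently observed head, then try to swing
    /// `head` over to the new node, retrying with the freshly observed head on contention.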
    pub fn push(&self, new_head: Ptr<Node<T>>) {
        let mut head = self.head.load(Ordering::Relaxed);

        loop {
            unsafe {
                new_head
                    .as_raw()
                    .as_ref()
                    .next()
                    .store(head, Ordering::Relaxed);
            }

            if let Err(p) = self.head.compare_and_exchange_weak(
                head,
                Some(new_head),
                Ordering::Release,
                Ordering::Relaxed,
            ) {
                head = p;
            } else {
                return;
            }
        }
    }

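    /// Pops the current head, or returns `None` if the stack is observed as empty.
    ///
    /// On success the returned pointer's version tag is incremented, so if the node is later
    /// pushed back, any stale copy of the old head will fail its compare-and-swap.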
    pub fn try_pop(&self) -> Option<Ptr<Node<T>>> {
        loop {
            if let Some(mut head) = self.head.load(Ordering::Acquire) {
                let next = unsafe { head.as_raw().as_ref().next().load(Ordering::Relaxed) };

                if self
                    .head
                    .compare_and_exchange_weak(
                        Some(head),
                        next,
                        Ordering::Release,
                        Ordering::Relaxed,
                    )
                    .is_ok()
                {
                    head.incr_tag();
                    return Some(head);
                }
            } else {
                return None;
            }
        }
    }
}

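/// Base address used to compress 64-bit pointers into 32-bit offsets on x86_64.
///
/// The anchor is latched to the first pointer handed to `Ptr::new`; later lookups (with
/// `init == None`) reuse it, falling back to the address of a static in `.bss` if nothing has
/// been registered yet. The result is masked down to `T`'s alignment.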
#[cfg(target_arch = "x86_64")]
fn anchor<T>(init: Option<*mut T>) -> *mut T {
    use core::sync::atomic::AtomicU8;

    use spin::Once;

    static LAZY_ANCHOR: Once<usize> = Once::new();

    let likely_unaligned_address = if let Some(init) = init {
        *LAZY_ANCHOR.call_once(|| init as usize)
    } else {
        LAZY_ANCHOR.get().copied().unwrap_or_else(|| {
            // the anchor has not been initialized yet; fall back to the address of a static
            // placed in `.bss`
            static BSS_ANCHOR: AtomicU8 = AtomicU8::new(0);
            &BSS_ANCHOR as *const _ as usize
        })
    };

    let alignment_mask = !(core::mem::align_of::<T>() - 1);
    let well_aligned_address = likely_unaligned_address & alignment_mask;
    well_aligned_address as *mut T
}

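/// Tagged, non-null pointer packed into 64 bits: the high 32 bits hold a non-zero version tag
/// and the low 32 bits hold the address (x86) or a signed offset from `anchor` (x86_64).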
pub struct Ptr<T> {
    inner: NonZeroU64,
    _marker: PhantomData<*mut T>,
}

impl<T> Clone for Ptr<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for Ptr<T> {}

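/// Tags start at 1 rather than 0 so that the packed 64-bit value is never zero; zero is
/// reserved to encode "null" in `Atomic`.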
fn initial_tag_value() -> NonZeroU32 {
    NonZeroU32::new(1).unwrap()
}

impl<T> Ptr<T> {
    #[cfg(target_arch = "x86_64")]
    pub fn new(p: *mut T) -> Option<Self> {
        use core::convert::TryFrom;

        // Only representable if `p` lies within `i32` range of the anchor; the first call also
        // latches the anchor to `p`.
        i32::try_from((p as isize).wrapping_sub(anchor::<T>(Some(p)) as isize))
            .ok()
            .map(|offset| unsafe { Ptr::from_parts(initial_tag_value(), offset) })
    }

    #[cfg(target_arch = "x86")]
    pub fn new(p: *mut T) -> Option<Self> {
        Some(unsafe { Ptr::from_parts(initial_tag_value(), p as i32) })
    }

    unsafe fn from_parts(tag: NonZeroU32, offset: i32) -> Self {
        Self {
            inner: NonZeroU64::new_unchecked((tag.get() as u64) << 32 | (offset as u32 as u64)),
            _marker: PhantomData,
        }
    }

    fn from_u64(p: u64) -> Option<Self> {
        NonZeroU64::new(p).map(|inner| Self {
            inner,
            _marker: PhantomData,
        })
    }

    fn into_u64(&self) -> u64 {
        self.inner.get()
    }

    fn tag(&self) -> NonZeroU32 {
        let tag = (self.inner.get() >> 32) as u32;
        debug_assert_ne!(0, tag, "broken non-zero invariant");
        unsafe { NonZeroU32::new_unchecked(tag) }
    }

    fn incr_tag(&mut self) {
        // Wrap back to the initial (non-zero) tag instead of 0 to preserve the `NonZeroU64`
        // invariant.
        let maybe_zero_tag = self.tag().get().wrapping_add(1);
        let tag = NonZeroU32::new(maybe_zero_tag).unwrap_or(initial_tag_value());
        let offset = self.offset();

        *self = unsafe { Ptr::from_parts(tag, offset) };
    }

    fn offset(&self) -> i32 {
        // Low 32 bits of the packed representation.
        self.inner.get() as i32
    }

    #[cfg(target_arch = "x86_64")]
    fn as_raw(&self) -> NonNull<T> {
        unsafe {
            NonNull::new_unchecked(
                (anchor::<T>(None) as isize).wrapping_add(self.offset() as isize) as *mut T,
            )
        }
    }

    #[cfg(target_arch = "x86")]
    fn as_raw(&self) -> NonNull<T> {
        unsafe { NonNull::new_unchecked(self.offset() as *mut T) }
    }

    pub fn dangling() -> Self {
        unsafe { Self::from_parts(initial_tag_value(), 0) }
    }

    pub unsafe fn as_ref(&self) -> &T {
        &*self.as_raw().as_ptr()
    }
}

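/// Nullable, atomically updated `Ptr<T>` stored as a raw `AtomicU64`; the all-zero bit pattern
/// represents `None`.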
struct Atomic<T> {
    inner: AtomicU64,
    _marker: PhantomData<*mut T>,
}

impl<T> Atomic<T> {
    const fn null() -> Self {
        Self {
            inner: AtomicU64::new(0),
            _marker: PhantomData,
        }
    }

    fn compare_and_exchange_weak(
        &self,
        current: Option<Ptr<T>>,
        new: Option<Ptr<T>>,
        succ: Ordering,
        fail: Ordering,
    ) -> Result<(), Option<Ptr<T>>> {
        self.inner
            .compare_exchange_weak(
                current.map(|p| p.into_u64()).unwrap_or(0),
                new.map(|p| p.into_u64()).unwrap_or(0),
                succ,
                fail,
            )
            .map(drop)
            .map_err(Ptr::from_u64)
    }

    fn load(&self, ord: Ordering) -> Option<Ptr<T>> {
        NonZeroU64::new(self.inner.load(ord)).map(|inner| Ptr {
            inner,
            _marker: PhantomData,
        })
    }

    fn store(&self, val: Option<Ptr<T>>, ord: Ordering) {
        self.inner
            .store(val.map(|p| p.into_u64()).unwrap_or(0), ord)
    }
}
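
// Minimal usage sketch of the push/try_pop API. It assumes this module is only compiled on
// x86/x86_64 (as the `cfg`s above suggest) and that the test node ends up within `i32` range
// of the anchor so that `Ptr::new` succeeds; the module and test names are illustrative only.
#[cfg(test)]
mod usage_sketch {
    use super::*;
    use core::cell::UnsafeCell;

    #[test]
    fn push_then_pop_returns_the_same_node() {
        let stack = Stack::new();

        // Build a node on the test's stack frame and hand the `Stack` a tagged pointer to it.
        let mut node = Node {
            next: Atomic::null(),
            data: UnsafeCell::new(42_i32),
        };
        let ptr = Ptr::new(&mut node as *mut _).expect("node address not representable");

        stack.push(ptr);

        // The node comes back with its version tag bumped; the payload is untouched.
        let popped = stack.try_pop().expect("stack should not be empty");
        assert_eq!(unsafe { *popped.as_ref().data.get() }, 42);

        // The stack is empty again.
        assert!(stack.try_pop().is_none());
    }
}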