heapless/pool/singleton.rs

//! `Pool` as a global singleton

use core::{
    any::TypeId,
    cmp, fmt,
    hash::{Hash, Hasher},
    marker::PhantomData,
    mem::{self, MaybeUninit},
    ops::{Deref, DerefMut},
    ptr::{self, NonNull},
};

use super::{Init, Node, Uninit};

pub mod arc;

/// Instantiates a pool as a global singleton
// NOTE(any(test)) makes testing easier (no need to enable Cargo features for testing)
#[cfg(any(
    armv6m,
    armv7a,
    armv7r,
    armv7m,
    armv8m_main,
    all(
        any(target_arch = "x86_64", target_arch = "x86"),
        feature = "x86-sync-pool"
    ),
    test
))]
#[macro_export]
macro_rules! pool {
    ($(#[$($attr:tt)*])* $ident:ident: $ty:ty) => {
        pub struct $ident;

        impl $crate::pool::singleton::Pool for $ident {
            type Data = $ty;

            fn ptr() -> &'static $crate::pool::Pool<$ty> {
                $(#[$($attr)*])*
                static $ident: $crate::pool::Pool<$ty> = $crate::pool::Pool::new();

                &$ident
            }
        }
    };
}
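
// A minimal usage sketch (the pool name `P`, the block type and the `MEMORY`
// size are illustrative, not part of the API):
//
//     pool!(P: [u8; 8]);
//
//     static mut MEMORY: [u8; 64] = [0; 64];
//     P::grow(unsafe { &mut MEMORY });
//     let block = P::alloc().unwrap().init([0u8; 8]);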

/// A global singleton memory pool
pub trait Pool {
    /// The type of data that can be allocated on this pool
    type Data: 'static;

    #[doc(hidden)]
    fn ptr() -> &'static super::Pool<Self::Data>;

    /// Claims a memory block from the pool
    ///
    /// Returns `None` when the pool is observed as exhausted
    ///
    /// *NOTE:* This method does *not* have bounded execution time; i.e. it contains a CAS loop
    fn alloc() -> Option<Box<Self, Uninit>>
    where
        Self: Sized,
    {
        Self::ptr().alloc().map(|inner| Box {
            _pool: PhantomData,
            inner,
        })
    }

    /// Increases the capacity of the pool
    ///
    /// This method might *not* fully utilize the given memory block due to alignment requirements
    ///
    /// This method returns the number of *new* blocks that can be allocated.
    fn grow(memory: &'static mut [u8]) -> usize {
        Self::ptr().grow(memory)
    }

    /// Increases the capacity of the pool
    ///
    /// Unlike [`Pool.grow`](trait.Pool.html#method.grow) this method fully utilizes the given
    /// memory block
    fn grow_exact<A>(memory: &'static mut MaybeUninit<A>) -> usize
    where
        A: AsMut<[Node<Self::Data>]>,
    {
        Self::ptr().grow_exact(memory)
    }
}
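
// A sketch of how `grow_exact` might be used (the pool name `P` and the array
// length 4 are illustrative). Unlike `grow`, every `Node` in `MEMORY` becomes
// an allocatable block:
//
//     use core::mem::MaybeUninit;
//     use heapless::pool::{singleton::Pool, Node};
//
//     pool!(P: u8);
//     static mut MEMORY: MaybeUninit<[Node<u8>; 4]> = MaybeUninit::uninit();
//     assert_eq!(P::grow_exact(unsafe { &mut MEMORY }), 4);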

/// A memory block that belongs to the global memory pool, `POOL`
pub struct Box<POOL, STATE = Init>
where
    POOL: Pool,
    STATE: 'static,
{
    _pool: PhantomData<POOL>,
    inner: super::Box<POOL::Data, STATE>,
}

impl<P> Box<P, Uninit>
where
    P: Pool,
{
    /// Initializes this memory block
    pub fn init(self, val: P::Data) -> Box<P, Init> {
        let node = self.inner.node;

        mem::forget(self);

        if mem::size_of::<P::Data>() == 0 {
            // no memory operation needed for ZST
            // BUT we want to avoid calling `val`'s destructor
            mem::forget(val)
        } else {
            unsafe {
                ptr::write(node.as_ref().data.get(), val);
            }
        }

        Box {
            inner: super::Box {
                node,
                _state: PhantomData,
            },
            _pool: PhantomData,
        }
    }
}
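
// The `Uninit -> Init` state change spelled out in types (a sketch; assumes a
// pool `P: u8` that has already been grown):
//
//     let b: Box<P, Uninit> = P::alloc().unwrap();
//     let b: Box<P, Init> = b.init(42); // moves the value into the block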

impl<P> Box<P, Uninit>
where
    P: Pool,
    P::Data: AsRef<[u8]>,
{
    #[deprecated(
        since = "0.7.3",
        note = "This can access uninitialized memory, use `init(..)` instead (https://github.com/japaric/heapless/issues/212)"
    )]
    /// (DO NOT USE, SEE DEPRECATION) Freezes the contents of this memory block
    ///
    /// See [rust-lang/rust#58363](https://github.com/rust-lang/rust/pull/58363) for details.
    pub fn freeze(self) -> Box<P, Init> {
        let node = self.inner.node;

        mem::forget(self);

        // it seems we can get away with not calling `ptr::freeze` here and not run into UB
        // because we are dealing with static memory and using fences
        // let p: *const u8 = (*node.as_ref().data.get()).as_slice().as_ptr();
        // ptr::freeze(p as *mut u8);

        Box {
            inner: super::Box {
                node,
                _state: PhantomData,
            },
            _pool: PhantomData,
        }
    }
}

impl<P> Box<P, Init>
where
    P: Pool,
{
    /// Forgets the contents of this memory block without running its destructor.
    ///
    /// Note that this does not return the memory block to the pool. The
    /// block can be reused, or returned to the pool by dropping it.
    pub fn forget(self) -> Box<P, Uninit> {
        let node = self.inner.node;

        mem::forget(self);
        if mem::size_of::<P::Data>() == 0 {
            // no need to do a pointer dereference in this case
        } else {
            mem::forget(unsafe { ptr::read(node.as_ref().data.get()) });
        }

        Box {
            inner: super::Box {
                node,
                _state: PhantomData,
            },
            _pool: PhantomData,
        }
    }
}
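
// Sketch of reusing a block after `forget` (assumes a pool `P: u8` that has
// already been grown):
//
//     let b = P::alloc().unwrap().init(1);
//     let u = b.forget(); // skips the destructor; the block is NOT returned
//     let b = u.init(2);  // ... so it can be reinitialized in place
//     drop(b);            // dropping returns the block to the pool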

impl<P> Deref for Box<P>
where
    P: Pool,
{
    type Target = P::Data;

    fn deref(&self) -> &P::Data {
        self.inner.deref()
    }
}

impl<P> DerefMut for Box<P>
where
    P: Pool,
{
    fn deref_mut(&mut self) -> &mut P::Data {
        self.inner.deref_mut()
    }
}

unsafe impl<P: Pool> stable_deref_trait::StableDeref for Box<P> {}

impl<P> fmt::Debug for Box<P>
where
    P: Pool,
    P::Data: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <P::Data as fmt::Debug>::fmt(self, f)
    }
}

impl<P> fmt::Display for Box<P>
where
    P: Pool,
    P::Data: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        <P::Data as fmt::Display>::fmt(self, f)
    }
}

impl<P, S> Drop for Box<P, S>
where
    P: Pool,
    S: 'static,
{
    fn drop(&mut self) {
        if TypeId::of::<S>() == TypeId::of::<Init>() {
            let p = if mem::size_of::<P::Data>() == 0 {
                // any pointer will do to invoke the destructor of a ZST
                NonNull::dangling().as_ptr()
            } else {
                unsafe { self.inner.node.as_ref().data.get() }
            };
            unsafe {
                ptr::drop_in_place(p);
            }
        }

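        // NOTE: ZST allocations are presumably served from a dangling node (as
        // above) rather than popped off the stack, so there is no node to
        // return to the pool in that case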
        if mem::size_of::<P::Data>() != 0 {
            P::ptr().stack.push(self.inner.node)
        }
    }
}

unsafe impl<P, S> Send for Box<P, S>
where
    P: Pool,
    P::Data: Send,
{
}

unsafe impl<P, S> Sync for Box<P, S>
where
    P: Pool,
    P::Data: Sync,
{
}

impl<P, T> AsRef<[T]> for Box<P>
where
    P: Pool,
    P::Data: AsRef<[T]>,
{
    fn as_ref(&self) -> &[T] {
        self.deref().as_ref()
    }
}

impl<P, T> AsMut<[T]> for Box<P>
where
    P: Pool,
    P::Data: AsMut<[T]>,
{
    fn as_mut(&mut self) -> &mut [T] {
        self.deref_mut().as_mut()
    }
}

impl<P> PartialEq for Box<P>
where
    P: Pool,
    P::Data: PartialEq,
{
    fn eq(&self, rhs: &Box<P>) -> bool {
        <P::Data as PartialEq>::eq(self, rhs)
    }
}

impl<P> Eq for Box<P>
where
    P: Pool,
    P::Data: Eq,
{
}

impl<P> PartialOrd for Box<P>
where
    P: Pool,
    P::Data: PartialOrd,
{
    fn partial_cmp(&self, rhs: &Box<P>) -> Option<cmp::Ordering> {
        <P::Data as PartialOrd>::partial_cmp(self, rhs)
    }
}

impl<P> Ord for Box<P>
where
    P: Pool,
    P::Data: Ord,
{
    fn cmp(&self, rhs: &Box<P>) -> cmp::Ordering {
        <P::Data as Ord>::cmp(self, rhs)
    }
}

impl<P> Hash for Box<P>
where
    P: Pool,
    P::Data: Hash,
{
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        <P::Data as Hash>::hash(self, state)
    }
}

#[cfg(test)]
mod tests {
    use core::{
        mem,
        sync::atomic::{AtomicUsize, Ordering},
    };

    use super::{super::Node, Pool};

    #[test]
    fn sanity() {
        const SZ: usize = 2 * mem::size_of::<Node<u8>>() - 1;
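        // one byte short of two nodes, so `grow` can carve out at most one block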
        static mut MEMORY: [u8; SZ] = [0; SZ];

        pool!(A: u8);

        // empty pool
        assert!(A::alloc().is_none());

        A::grow(unsafe { &mut MEMORY });

        let x = A::alloc().unwrap().init(0);
        assert_eq!(*x, 0);

        // pool exhausted
        assert!(A::alloc().is_none());

        drop(x);

        // should be possible to allocate again
        assert_eq!(*A::alloc().unwrap().init(1), 1);
    }

    #[test]
    fn boxed_zst_is_well_aligned() {
        #[repr(align(2))]
        pub struct Zst2;

        pool!(A: Zst2);

        let x = A::alloc().unwrap().init(Zst2);
        assert_eq!(0, &*x as *const Zst2 as usize % 2);

        #[repr(align(4096))]
        pub struct Zst4096;

        pool!(B: Zst4096);

        let x = B::alloc().unwrap().init(Zst4096);
        assert_eq!(0, &*x as *const Zst4096 as usize % 4096);
    }

    #[test]
    fn destructors() {
        static COUNT: AtomicUsize = AtomicUsize::new(0);

        pub struct X;

        impl X {
            fn new() -> X {
                COUNT.fetch_add(1, Ordering::Relaxed);
                X
            }
        }

        impl Drop for X {
            fn drop(&mut self) {
                COUNT.fetch_sub(1, Ordering::Relaxed);
            }
        }

        pool!(A: X);

        let x = A::alloc().unwrap().init(X::new());
        let y = A::alloc().unwrap().init(X::new());
        let z = A::alloc().unwrap().init(X::new());

        assert_eq!(COUNT.load(Ordering::Relaxed), 3);

        // this runs `X`'s destructor
        drop(x);

        assert_eq!(COUNT.load(Ordering::Relaxed), 2);

        // this leaks memory
        mem::forget(y);

        assert_eq!(COUNT.load(Ordering::Relaxed), 2);

        // this forgets `X` without leaking memory
        z.forget();

        assert_eq!(COUNT.load(Ordering::Relaxed), 2);
    }
}