rtic_time/linked_list.rs
use core::marker::PhantomPinned;
use core::pin::Pin;
use core::sync::atomic::{AtomicPtr, Ordering};
use critical_section as cs;

/// An atomic sorted linked list for the timer queue.
///
/// Atomicity is guaranteed using very short [`critical_section`]s, so this list is _not_
/// lock free, but it will not deadlock.
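///
/// A minimal usage sketch (illustrative only; marked `ignore` since the type is
/// crate-private and the snippet cannot run as a doc-test):
///
/// ```ignore
/// use core::pin::pin;
///
/// let list: LinkedList<u32> = LinkedList::new();
/// let link = pin!(Link::new(42));
///
/// // SAFETY: the pinned link outlives its membership in the list.
/// let (head_updated, _addr) = unsafe { list.insert(link.as_ref()) };
/// assert!(head_updated); // The first element becomes the head.
///
/// // Pop the head if it satisfies the predicate.
/// assert_eq!(list.pop_if(|v| *v == 42), Some(42));
/// ```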
pub(crate) struct LinkedList<T> {
    head: AtomicPtr<Link<T>>,
}

impl<T> LinkedList<T> {
    /// Create a new linked list.
    pub const fn new() -> Self {
        Self {
            head: AtomicPtr::new(core::ptr::null_mut()),
        }
    }
}

impl<T: PartialOrd + Clone> LinkedList<T> {
    /// Pop the first element in the queue if the closure returns true.
    pub fn pop_if<F: FnOnce(&T) -> bool>(&self, f: F) -> Option<T> {
        cs::with(|_| {
            // Make sure all previous writes are visible
            core::sync::atomic::fence(Ordering::SeqCst);

            let head = self.head.load(Ordering::Relaxed);

            // SAFETY: `as_ref` is safe as `insert` requires a valid reference to a link
            if let Some(head) = unsafe { head.as_ref() } {
                if f(&head.val) {
                    // Move head to the next element
                    self.head
                        .store(head.next.load(Ordering::Relaxed), Ordering::Relaxed);

                    // We read the value at head
                    let head_val = head.val.clone();

                    return Some(head_val);
                }
            }
            None
        })
    }

    /// Delete the link at the given address, as returned by [`insert`](Self::insert).
    pub fn delete(&self, addr: usize) {
        cs::with(|_| {
            // Make sure all previous writes are visible
            core::sync::atomic::fence(Ordering::SeqCst);

            let head = self.head.load(Ordering::Relaxed);

            // SAFETY: `as_ref` is safe as `insert` requires a valid reference to a link
            let head_ref = if let Some(head_ref) = unsafe { head.as_ref() } {
                head_ref
            } else {
                // 1. List is empty, do nothing
                return;
            };

            if head as *const _ as usize == addr {
                // 2. Replace head with head.next
                self.head
                    .store(head_ref.next.load(Ordering::Relaxed), Ordering::Relaxed);

                return;
            }

            // 3. Search the list for the correct node
            let mut curr = head_ref;
            let mut next = head_ref.next.load(Ordering::Relaxed);

            // SAFETY: `as_ref` is safe as `insert` requires a valid reference to a link
            while let Some(next_link) = unsafe { next.as_ref() } {
                // Next is not null

                if next as *const _ as usize == addr {
                    curr.next
                        .store(next_link.next.load(Ordering::Relaxed), Ordering::Relaxed);

                    return;
                }

                // Continue searching
                curr = next_link;
                next = next_link.next.load(Ordering::Relaxed);
            }
        })
    }

    /// Insert a new link into the linked list.
    ///
    /// Returns `(head_updated, addr)`: `head_updated` is `true` if the new link became
    /// the head of the list, and `addr` is the link's address, for later use with
    /// [`delete`](Self::delete).
    ///
    /// # Safety
    ///
    /// The pinned link must live until it is removed from this list.
    pub unsafe fn insert(&self, val: Pin<&Link<T>>) -> (bool, usize) {
        cs::with(|_| {
            // SAFETY: This data structure does not move the underlying value.
            let val = val.get_ref();
            let addr = val as *const _ as usize;

            // Make sure all previous writes are visible
            core::sync::atomic::fence(Ordering::SeqCst);

            let head = self.head.load(Ordering::Relaxed);

            // 3 cases to handle

            // 1. List is empty, write to head
            // SAFETY: `as_ref` is safe as `insert` requires a valid reference to a link
            let head_ref = if let Some(head_ref) = unsafe { head.as_ref() } {
                head_ref
            } else {
                self.head
                    .store(val as *const _ as *mut _, Ordering::Relaxed);
                return (true, addr);
            };

            // 2. val needs to go in first
            if val.val < head_ref.val {
                // Set current head as next of `val`
                val.next.store(head, Ordering::Relaxed);

                // `val` is now first in the queue
                self.head
                    .store(val as *const _ as *mut _, Ordering::Relaxed);

                return (true, addr);
            }

            // 3. Search the list for the correct place
            let mut curr = head_ref;
            let mut next = head_ref.next.load(Ordering::Relaxed);

            // SAFETY: `as_ref` is safe as `insert` requires a valid reference to a link
            while let Some(next_link) = unsafe { next.as_ref() } {
                // Next is not null

                if val.val < next_link.val {
                    // Replace next with `val`
                    val.next.store(next, Ordering::Relaxed);

                    // Insert `val`
                    curr.next
                        .store(val as *const _ as *mut _, Ordering::Relaxed);

                    return (false, addr);
                }

                // Continue searching
                curr = next_link;
                next = next_link.next.load(Ordering::Relaxed);
            }

            // No next, write link to last position in list
            curr.next
                .store(val as *const _ as *mut _, Ordering::Relaxed);

            (false, addr)
        })
    }
}

/// A link in the linked list.
pub struct Link<T> {
    pub(crate) val: T,
    next: AtomicPtr<Link<T>>,
    _up: PhantomPinned,
}

impl<T> Link<T> {
    /// Create a new link.
    pub const fn new(val: T) -> Self {
        Self {
            val,
            next: AtomicPtr::new(core::ptr::null_mut()),
            _up: PhantomPinned,
        }
    }
}
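
// A minimal usage sketch, not part of the original file: it exercises `insert`, `delete`,
// and `pop_if` together, assuming a `critical_section` implementation is available in the
// test build (e.g. the `critical-section` crate's `std` feature as a dev-dependency).
#[cfg(test)]
mod tests {
    use super::*;
    use core::pin::pin;

    #[test]
    fn insert_pop_delete() {
        let list: LinkedList<u32> = LinkedList::new();

        let a = pin!(Link::new(2));
        let b = pin!(Link::new(1));
        let c = pin!(Link::new(3));

        // SAFETY: the links are pinned on this stack frame and outlive every
        // list operation below.
        let (_, _addr_a) = unsafe { list.insert(a.as_ref()) };
        let (b_is_head, _) = unsafe { list.insert(b.as_ref()) };
        let (_, addr_c) = unsafe { list.insert(c.as_ref()) };

        // `1` is smaller than the previous head `2`, so it became the new head.
        assert!(b_is_head);

        // Unlink `3` by address, then drain the rest in sorted order.
        list.delete(addr_c);
        assert_eq!(list.pop_if(|_| true), Some(1));
        assert_eq!(list.pop_if(|v| *v > 10), None); // Predicate rejects `2`.
        assert_eq!(list.pop_if(|_| true), Some(2));
        assert_eq!(list.pop_if(|_| true), None); // List is now empty.
    }
}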