// portable_atomic/imp/fallback/outline_atomics.rs
use core::sync::atomic::Ordering;

15#[cfg(any(target_arch = "x86_64", target_arch = "powerpc64", target_arch = "riscv64"))]
16pub(crate) type Udw = u128;
17#[cfg(any(target_arch = "x86_64", target_arch = "powerpc64", target_arch = "riscv64"))]
18pub(crate) type AtomicUdw = super::super::super::fallback::AtomicU128;
19#[cfg(any(target_arch = "x86_64", target_arch = "powerpc64", target_arch = "riscv64"))]
20pub(crate) type AtomicIdw = super::super::super::fallback::AtomicI128;
21
22#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
23pub(crate) type Udw = u64;
24#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
25pub(crate) type AtomicUdw = super::super::super::fallback::AtomicU64;
26#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
27pub(crate) type AtomicIdw = super::super::super::fallback::AtomicI64;
28
// Debug-only sanity check: these outline (fallback) paths must only be
// reached when runtime detection reports that the native double-word
// atomic instruction is NOT available — otherwise dispatch went wrong.
macro_rules! debug_assert_outline_atomics {
    () => {
        #[cfg(target_arch = "x86_64")]
        {
            // cmpxchg16b available => the native path should have been taken.
            debug_assert!(!super::detect::detect().has_cmpxchg16b());
        }
        #[cfg(target_arch = "powerpc64")]
        {
            debug_assert!(!super::detect::detect().has_quadword_atomics());
        }
        #[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
        {
            debug_assert!(!super::detect::detect().has_zacas());
        }
        #[cfg(target_arch = "arm")]
        {
            debug_assert!(!super::has_kuser_cmpxchg64());
        }
    };
}
50
51#[cold]
52pub(crate) unsafe fn atomic_load(src: *mut Udw, order: Ordering) -> Udw {
53 debug_assert_outline_atomics!();
54 #[allow(clippy::cast_ptr_alignment)]
55 unsafe {
57 (*(src as *const AtomicUdw)).load(order)
58 }
59}
60fn_alias! {
61 #[cold]
62 pub(crate) unsafe fn(src: *mut Udw) -> Udw;
63 #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
65 atomic_load_non_seqcst = atomic_load(Ordering::Acquire);
66 atomic_load_seqcst = atomic_load(Ordering::SeqCst);
67}
68
69#[cfg(not(any(target_arch = "arm", target_arch = "riscv32", target_arch = "riscv64")))]
70#[cold]
71pub(crate) unsafe fn atomic_store(dst: *mut Udw, val: Udw, order: Ordering) {
72 debug_assert_outline_atomics!();
73 #[allow(clippy::cast_ptr_alignment)]
74 unsafe {
76 (*(dst as *const AtomicUdw)).store(val, order);
77 }
78}
79#[cfg(not(any(target_arch = "arm", target_arch = "riscv32", target_arch = "riscv64")))]
80fn_alias! {
81 #[cold]
82 pub(crate) unsafe fn(dst: *mut Udw, val: Udw);
83 atomic_store_non_seqcst = atomic_store(Ordering::Release);
85 atomic_store_seqcst = atomic_store(Ordering::SeqCst);
86}
87
88#[cold]
89pub(crate) unsafe fn atomic_compare_exchange(
90 dst: *mut Udw,
91 old: Udw,
92 new: Udw,
93 success: Ordering,
94 failure: Ordering,
95) -> (Udw, bool) {
96 debug_assert_outline_atomics!();
97 #[allow(clippy::cast_ptr_alignment)]
98 unsafe {
100 match (*(dst as *const AtomicUdw)).compare_exchange(old, new, success, failure) {
101 Ok(v) => (v, true),
102 Err(v) => (v, false),
103 }
104 }
105}
106fn_alias! {
107 #[cold]
108 pub(crate) unsafe fn(dst: *mut Udw, old: Udw, new: Udw) -> (Udw, bool);
109 #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
111 atomic_compare_exchange_non_seqcst
112 = atomic_compare_exchange(Ordering::AcqRel, Ordering::Acquire);
113 atomic_compare_exchange_seqcst
114 = atomic_compare_exchange(Ordering::SeqCst, Ordering::SeqCst);
115}
116
// Generates a 3-argument (dst, val, order) outline RMW function delegating
// to the named fallback atomic method, plus ordering-specialized aliases.
// The `as _` casts allow delegating to `AtomicIdw` (signed) methods while
// keeping a `Udw` interface; hence the sign-related clippy allows.
macro_rules! atomic_rmw_3 {
    (
        $name:ident($atomic_type:ident::$method_name:ident),
        $non_seqcst_alias:ident, $seqcst_alias:ident
    ) => {
        #[cold]
        pub(crate) unsafe fn $name(dst: *mut Udw, val: Udw, order: Ordering) -> Udw {
            debug_assert_outline_atomics!();
            #[allow(
                clippy::as_underscore,
                clippy::cast_possible_wrap,
                clippy::cast_ptr_alignment,
                clippy::cast_sign_loss
            )]
            // SAFETY: the caller must uphold the safety contract.
            unsafe {
                (*(dst as *const $atomic_type)).$method_name(val as _, order) as Udw
            }
        }
        fn_alias! {
            #[cold]
            pub(crate) unsafe fn(dst: *mut Udw, val: Udw) -> Udw;
            #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
            $non_seqcst_alias = $name(Ordering::AcqRel);
            $seqcst_alias = $name(Ordering::SeqCst);
        }
    };
}
// Generates a 2-argument (dst, order) outline RMW function (no value
// operand, e.g. not/neg) delegating to the named fallback atomic method,
// plus ordering-specialized aliases.
macro_rules! atomic_rmw_2 {
    (
        $name:ident($atomic_type:ident::$method_name:ident),
        $non_seqcst_alias:ident, $seqcst_alias:ident
    ) => {
        #[cold]
        pub(crate) unsafe fn $name(dst: *mut Udw, order: Ordering) -> Udw {
            debug_assert_outline_atomics!();
            #[allow(clippy::cast_ptr_alignment)]
            // SAFETY: the caller must uphold the safety contract.
            unsafe {
                (*(dst as *const $atomic_type)).$method_name(order) as Udw
            }
        }
        fn_alias! {
            #[cold]
            pub(crate) unsafe fn(dst: *mut Udw) -> Udw;
            #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
            $non_seqcst_alias = $name(Ordering::AcqRel);
            $seqcst_alias = $name(Ordering::SeqCst);
        }
    };
}
170
171atomic_rmw_3!(atomic_swap(AtomicUdw::swap), atomic_swap_non_seqcst, atomic_swap_seqcst);
172atomic_rmw_3!(atomic_add(AtomicUdw::fetch_add), atomic_add_non_seqcst, atomic_add_seqcst);
173atomic_rmw_3!(atomic_sub(AtomicUdw::fetch_sub), atomic_sub_non_seqcst, atomic_sub_seqcst);
174atomic_rmw_3!(atomic_and(AtomicUdw::fetch_and), atomic_and_non_seqcst, atomic_and_seqcst);
175atomic_rmw_3!(atomic_nand(AtomicUdw::fetch_nand), atomic_nand_non_seqcst, atomic_nand_seqcst);
176atomic_rmw_3!(atomic_or(AtomicUdw::fetch_or), atomic_or_non_seqcst, atomic_or_seqcst);
177atomic_rmw_3!(atomic_xor(AtomicUdw::fetch_xor), atomic_xor_non_seqcst, atomic_xor_seqcst);
178atomic_rmw_3!(atomic_max(AtomicIdw::fetch_max), atomic_max_non_seqcst, atomic_max_seqcst);
179atomic_rmw_3!(atomic_umax(AtomicUdw::fetch_max), atomic_umax_non_seqcst, atomic_umax_seqcst);
180atomic_rmw_3!(atomic_min(AtomicIdw::fetch_min), atomic_min_non_seqcst, atomic_min_seqcst);
181atomic_rmw_3!(atomic_umin(AtomicUdw::fetch_min), atomic_umin_non_seqcst, atomic_umin_seqcst);
182
183atomic_rmw_2!(atomic_not(AtomicUdw::fetch_not), atomic_not_non_seqcst, atomic_not_seqcst);
184atomic_rmw_2!(atomic_neg(AtomicUdw::fetch_neg), atomic_neg_non_seqcst, atomic_neg_seqcst);