heapless/pool/treiber/llsc.rs

use core::{
    cell::UnsafeCell,
    ptr::{self, NonNull},
};

use super::{Node, Stack};

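/// Unsynchronized pointer cell that holds the stack's `top` pointer.
///
/// No `core::sync::atomic` type is needed here: the LL/SC primitives in `arch`
/// operate directly on this cell's address via exclusive load/store instructions.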
pub struct AtomicPtr<N>
where
    N: Node,
{
    inner: UnsafeCell<Option<NonNull<N>>>,
}

impl<N> AtomicPtr<N>
where
    N: Node,
{
    pub const fn null() -> Self {
        Self {
            inner: UnsafeCell::new(None),
        }
    }
}

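/// Non-null, `Copy` pointer to a stack node, as handed out by `try_pop` and
/// accepted by `push`.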
pub struct NonNullPtr<N>
where
    N: Node,
{
    inner: NonNull<N>,
}

impl<N> NonNullPtr<N>
where
    N: Node,
{
    pub fn as_ptr(&self) -> *mut N {
        self.inner.as_ptr().cast()
    }

    pub fn from_static_mut_ref(ref_: &'static mut N) -> Self {
        Self {
            inner: NonNull::from(ref_),
        }
    }
}

impl<N> Clone for NonNullPtr<N>
where
    N: Node,
{
    fn clone(&self) -> Self {
        Self { inner: self.inner }
    }
}

impl<N> Copy for NonNullPtr<N> where N: Node {}

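/// Pushes `node` onto `stack` using a load-link / store-conditional retry loop.
///
/// # Safety
///
/// - `node` must point to a valid, live node
/// - the caller must enforce aliasing rules, e.g. the same node must not be
///   pushed (and thus live on the stack) more than once at a time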
pub unsafe fn push<N>(stack: &Stack<N>, mut node: NonNullPtr<N>)
where
    N: Node,
{
    let top_addr = ptr::addr_of!(stack.top) as *mut usize;

    loop {
        let top = arch::load_link(top_addr);

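        // link the new node in front of the current top before trying to publish it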
        node.inner
            .as_mut()
            .next_mut()
            .inner
            .get()
            .write(NonNull::new(top as *mut _));

        if arch::store_conditional(node.inner.as_ptr() as usize, top_addr).is_ok() {
            break;
        }
    }
}

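/// Pops the most recently pushed node, or returns `None` if the stack is empty.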
pub fn try_pop<N>(stack: &Stack<N>) -> Option<NonNullPtr<N>>
where
    N: Node,
{
    unsafe {
        let top_addr = ptr::addr_of!(stack.top) as *mut usize;

        loop {
            let top = arch::load_link(top_addr);

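            // a null `top` means the stack is empty; otherwise try to swing the
            // head pointer from `top` to `top.next`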
            if let Some(top) = NonNull::new(top as *mut N) {
                let next = &top.as_ref().next();

                if arch::store_conditional(
                    next.inner
                        .get()
                        .read()
                        .map(|non_null| non_null.as_ptr() as usize)
                        .unwrap_or_default(),
                    top_addr,
                )
                .is_ok()
                {
                    break Some(NonNullPtr { inner: top });
                }
            } else {
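                // empty stack: drop the exclusive reservation taken by `load_link`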
                arch::clear_load_link();

                break None;
            }
        }
    }
}

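// Load-link / store-conditional primitives for ARM cores that provide the
// exclusive-access instructions (LDREX / STREX / CLREX).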
#[cfg(arm_llsc)]
mod arch {
    use core::arch::asm;

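    /// Clears this core's exclusive-access reservation (`clrex`).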
    #[inline(always)]
    pub fn clear_load_link() {
        unsafe { asm!("clrex", options(nomem, nostack)) }
    }

    /// # Safety
    /// - `addr` must be a valid pointer
    #[inline(always)]
    pub unsafe fn load_link(addr: *const usize) -> usize {
        let value;
        asm!("ldrex {}, [{}]", out(reg) value, in(reg) addr, options(nostack));
        value
    }

    /// # Safety
    /// - `addr` must be a valid pointer
    #[inline(always)]
    pub unsafe fn store_conditional(value: usize, addr: *mut usize) -> Result<(), ()> {
        let outcome: usize;
        asm!("strex {}, {}, [{}]", out(reg) outcome, in(reg) value, in(reg) addr, options(nostack));
        if outcome == 0 {
            Ok(())
        } else {
            Err(())
        }
    }
}