sel4_abstract_ptr/abstract_ptr/atomic_operations.rs

//
// Copyright 2024, Colias Group, LLC
//
// SPDX-License-Identifier: MIT OR Apache-2.0
//

use core::sync::atomic::Ordering;

use crate::{memory_type::AtomicOps, AbstractPtr};

impl<M, T, A> AbstractPtr<'_, M, T, A>
where
    M: AtomicOps<T>,
{
    /// Atomically loads the pointed-to value.
    #[inline]
    pub fn atomic_load(self, order: Ordering) -> M::Value {
        unsafe { M::atomic_load(self.pointer.as_ptr().cast_const(), order) }
    }

    /// Atomically stores `val` into the pointed-to location.
    #[inline]
    pub fn atomic_store(self, val: M::Value, order: Ordering) {
        unsafe {
            M::atomic_store(self.pointer.as_ptr(), val, order);
        }
    }

    /// Atomically swaps in `val`, returning the previous value.
    #[inline]
    pub fn atomic_swap(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_swap(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically stores `new` if the current value equals `current`.
    ///
    /// Returns the previous value: `Ok` if the exchange succeeded, `Err` otherwise.
    #[inline]
    pub fn atomic_compare_exchange(
        self,
        current: M::Value,
        new: M::Value,
        success: Ordering,
        failure: Ordering,
    ) -> Result<M::Value, M::Value> {
        unsafe { M::atomic_compare_exchange(self.pointer.as_ptr(), current, new, success, failure) }
    }

    /// Like [`Self::atomic_compare_exchange`], but allowed to fail spuriously,
    /// which can yield more efficient code on some platforms.
    #[inline]
    pub fn atomic_compare_exchange_weak(
        self,
        current: M::Value,
        new: M::Value,
        success: Ordering,
        failure: Ordering,
    ) -> Result<M::Value, M::Value> {
        unsafe {
            M::atomic_compare_exchange_weak(self.pointer.as_ptr(), current, new, success, failure)
        }
    }

    /// Atomically adds `val`, returning the previous value.
    #[inline]
    pub fn atomic_fetch_add(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_add(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically subtracts `val`, returning the previous value.
    #[inline]
    pub fn atomic_fetch_sub(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_sub(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically applies a bitwise AND with `val`, returning the previous value.
    #[inline]
    pub fn atomic_fetch_and(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_and(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically applies a bitwise NAND with `val`, returning the previous value.
    #[inline]
    pub fn atomic_fetch_nand(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_nand(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically applies a bitwise OR with `val`, returning the previous value.
    #[inline]
    pub fn atomic_fetch_or(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_or(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically applies a bitwise XOR with `val`, returning the previous value.
    #[inline]
    pub fn atomic_fetch_xor(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_xor(self.pointer.as_ptr(), val, order) }
    }

    /// Fetches the value and applies `f` to it, retrying in a
    /// compare-exchange-weak loop until the update succeeds.
    ///
    /// Returns `Ok(previous)` on success, or `Err(previous)` if `f` returns `None`.
    #[inline]
    pub fn atomic_fetch_update<F>(
        self,
        set_order: Ordering,
        fetch_order: Ordering,
        mut f: F,
    ) -> Result<M::Value, M::Value>
    where
        M::Value: Copy,
        F: FnMut(M::Value) -> Option<M::Value>,
    {
        let mut prev = self.atomic_load(fetch_order);
        while let Some(next) = f(prev) {
            match self.atomic_compare_exchange_weak(prev, next, set_order, fetch_order) {
                x @ Ok(_) => return x,
                Err(next_prev) => prev = next_prev,
            }
        }
        Err(prev)
    }

    /// Atomically stores the maximum of the current value and `val`,
    /// returning the previous value.
    #[inline]
    pub fn atomic_fetch_max(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_max(self.pointer.as_ptr(), val, order) }
    }

    /// Atomically stores the minimum of the current value and `val`,
    /// returning the previous value.
    #[inline]
    pub fn atomic_fetch_min(self, val: M::Value, order: Ordering) -> M::Value {
        unsafe { M::atomic_min(self.pointer.as_ptr(), val, order) }
    }
}
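
// A minimal usage sketch, assuming an `AbstractPtr` to a `u64` has already been
// obtained through the crate's construction path; the function name and element
// type below are illustrative assumptions, not part of this module's API.
//
// fn increment_counter<M, A>(counter: AbstractPtr<'_, M, u64, A>, one: M::Value) -> M::Value
// where
//     M: AtomicOps<u64>,
// {
//     // Add `one` and return the previous value, mirroring `AtomicU64::fetch_add`.
//     counter.atomic_fetch_add(one, Ordering::SeqCst)
// }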