1use core::{
69 cmp::Ordering,
70 fmt,
71 hash::{Hash, Hasher},
72 mem::ManuallyDrop,
73 ops, ptr,
74};
75
76use stable_deref_trait::StableDeref;
77
78use super::treiber::{AtomicPtr, NonNullPtr, Stack, StructNode};
79
/// Creates a new singleton object pool type named `$name` that manages
/// objects of type `$data_type`.
#[macro_export]
macro_rules! object_pool {
    ($name:ident: $data_type:ty) => {
        // Unit struct acting as the user-facing handle for this pool.
        pub struct $name;

        impl $crate::pool::object::ObjectPool for $name {
            type Data = $data_type;

            fn singleton() -> &'static $crate::pool::object::ObjectPoolImpl<$data_type> {
                // One `static` pool per macro invocation; function-local so
                // different invocations cannot collide, and reusing `$name`
                // keeps the generated identifier hygienic.
                #[allow(non_upper_case_globals)]
                static $name: $crate::pool::object::ObjectPoolImpl<$data_type> =
                    $crate::pool::object::ObjectPoolImpl::new();

                &$name
            }
        }

        // Inherent convenience wrappers so callers can write
        // `$name.request()` / `$name.manage(..)` without importing the trait.
        impl $name {
            #[allow(dead_code)]
            // Requests a free object from the pool; `None` when exhausted.
            pub fn request(&self) -> Option<$crate::pool::object::Object<$name>> {
                <$name as $crate::pool::object::ObjectPool>::request()
            }

            #[allow(dead_code)]
            // Hands a statically allocated block over to the pool.
            pub fn manage(
                &self,
                block: &'static mut $crate::pool::object::ObjectBlock<$data_type>,
            ) {
                <$name as $crate::pool::object::ObjectPool>::manage(block)
            }
        }
    };
}
121
/// A singleton pool of statically allocated objects.
///
/// Implemented automatically by the `object_pool!` macro; not intended to be
/// implemented by hand.
pub trait ObjectPool: Sized {
    /// The data type of the objects managed by this pool.
    type Data: 'static;

    /// Implementation detail: the per-pool static storage.
    #[doc(hidden)]
    fn singleton() -> &'static ObjectPoolImpl<Self::Data>;

    /// Requests an object from the pool.
    ///
    /// Returns `None` when no free object is available.
    fn request() -> Option<Object<Self>> {
        Self::singleton()
            .request()
            .map(|node_ptr| Object { node_ptr })
    }

    /// Adds a statically allocated `block` to the pool's free list.
    fn manage(block: &'static mut ObjectBlock<Self::Data>) {
        Self::singleton().manage(block);
    }
}
143
/// Implementation detail behind `ObjectPool`; holds the pool's free list.
#[doc(hidden)]
pub struct ObjectPoolImpl<T> {
    // Free list of intrusive nodes; a Treiber-style stack (see
    // `super::treiber`).
    stack: Stack<StructNode<T>>,
}
149
impl<T> ObjectPoolImpl<T> {
    /// Creates an empty pool. `const` so it can live in a `static`.
    #[doc(hidden)]
    pub const fn new() -> Self {
        Self {
            stack: Stack::new(),
        }
    }

    // Pops a free node off the stack; `None` when the pool is exhausted.
    fn request(&self) -> Option<NonNullPtr<StructNode<T>>> {
        self.stack.try_pop()
    }

    // Pushes `block`'s node onto the free stack, making it available to
    // subsequent `request` calls.
    fn manage(&self, block: &'static mut ObjectBlock<T>) {
        let node: &'static mut _ = &mut block.node;

        // SAFETY: `node` is derived from an exclusive `'static` reference, so
        // it lives forever and no other reference to it exists once it is
        // handed to the stack.
        unsafe { self.stack.push(NonNullPtr::from_static_mut_ref(node)) }
    }
}
169
170unsafe impl<T> Sync for ObjectPoolImpl<T> where T: Send {}
173
/// An object borrowed from an object pool `P`.
///
/// Returned to the pool (without running the data's destructor — see `Drop`)
/// when this handle is dropped.
pub struct Object<P>
where
    P: ObjectPool,
{
    // Pointer to the pool node that this handle exclusively owns while live.
    node_ptr: NonNullPtr<StructNode<P::Data>>,
}
181
182impl<A, T, const N: usize> AsMut<[T]> for Object<A>
183where
184 A: ObjectPool<Data = [T; N]>,
185{
186 fn as_mut(&mut self) -> &mut [T] {
187 &mut **self
188 }
189}
190
191impl<A, T, const N: usize> AsRef<[T]> for Object<A>
192where
193 A: ObjectPool<Data = [T; N]>,
194{
195 fn as_ref(&self) -> &[T] {
196 &**self
197 }
198}
199
200impl<A> fmt::Debug for Object<A>
201where
202 A: ObjectPool,
203 A::Data: fmt::Debug,
204{
205 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
206 A::Data::fmt(self, f)
207 }
208}
209
impl<A> ops::Deref for Object<A>
where
    A: ObjectPool,
{
    type Target = A::Data;

    fn deref(&self) -> &Self::Target {
        // SAFETY: while this `Object` is live it exclusively owns its node
        // (the node was popped off the pool's free stack), so the `data`
        // field is not aliased. `addr_of!` projects the field without
        // materializing a reference to the whole node.
        unsafe { &*ptr::addr_of!((*self.node_ptr.as_ptr()).data) }
    }
}
220
impl<A> ops::DerefMut for Object<A>
where
    A: ObjectPool,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: `&mut self` guarantees exclusive access to this handle,
        // which in turn exclusively owns the node; `addr_of_mut!` projects
        // the `data` field without creating an intermediate reference to the
        // whole node.
        unsafe { &mut *ptr::addr_of_mut!((*self.node_ptr.as_ptr()).data) }
    }
}
229
230unsafe impl<A> StableDeref for Object<A> where A: ObjectPool {}
231
232impl<A> fmt::Display for Object<A>
233where
234 A: ObjectPool,
235 A::Data: fmt::Display,
236{
237 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
238 A::Data::fmt(self, f)
239 }
240}
241
impl<P> Drop for Object<P>
where
    P: ObjectPool,
{
    fn drop(&mut self) {
        // Return the node to the pool's free stack. The inner value's
        // destructor deliberately does NOT run: the pool hands the value
        // back out as-is on the next `request` (see the
        // `destructor_does_not_run_on_drop` test).
        // SAFETY: this handle holds the only pointer to the node, so pushing
        // it back cannot introduce aliasing.
        unsafe { P::singleton().stack.push(self.node_ptr) }
    }
}
250
// Marker impl: `Object` compares by pooled value (see `PartialEq`), so
// equality is a total relation whenever the data's is.
impl<A> Eq for Object<A>
where
    A: ObjectPool,
    A::Data: Eq,
{
}
257
258impl<A> Hash for Object<A>
259where
260 A: ObjectPool,
261 A::Data: Hash,
262{
263 fn hash<H>(&self, state: &mut H)
264 where
265 H: Hasher,
266 {
267 (**self).hash(state);
268 }
269}
270
271impl<A> Ord for Object<A>
272where
273 A: ObjectPool,
274 A::Data: Ord,
275{
276 fn cmp(&self, other: &Self) -> Ordering {
277 A::Data::cmp(self, other)
278 }
279}
280
281impl<A, B> PartialEq<Object<B>> for Object<A>
282where
283 A: ObjectPool,
284 B: ObjectPool,
285 A::Data: PartialEq<B::Data>,
286{
287 fn eq(&self, other: &Object<B>) -> bool {
288 A::Data::eq(self, other)
289 }
290}
291
292impl<A, B> PartialOrd<Object<B>> for Object<A>
293where
294 A: ObjectPool,
295 B: ObjectPool,
296 A::Data: PartialOrd<B::Data>,
297{
298 fn partial_cmp(&self, other: &Object<B>) -> Option<Ordering> {
299 A::Data::partial_cmp(self, other)
300 }
301}
302
// SAFETY: an `Object` is an exclusively owned handle to its data, so moving
// it to another thread is sound when the data itself is `Send`.
// NOTE(review): dropping on another thread pushes the node onto the shared
// free stack — soundness also relies on the stack's thread-safety; confirm
// against `super::treiber`.
unsafe impl<P> Send for Object<P>
where
    P: ObjectPool,
    P::Data: Send,
{
}
309
// SAFETY: `&Object` only exposes `&P::Data` (via `Deref` and the trait
// impls), so sharing it across threads is sound when the data is `Sync`.
unsafe impl<P> Sync for Object<P>
where
    P: ObjectPool,
    P::Data: Sync,
{
}
316
/// A statically allocatable block holding one object of type `T`, not yet
/// claimed by any pool (see `ObjectPool::manage`).
pub struct ObjectBlock<T> {
    // Intrusive node: the pool links blocks into its free stack through this.
    node: StructNode<T>,
}
321
impl<T> ObjectBlock<T> {
    /// Creates a new block with the given initial value.
    ///
    /// `const` so blocks can be placed in `static` storage — the pool only
    /// accepts `&'static mut` blocks.
    pub const fn new(initial_value: T) -> Self {
        Self {
            node: StructNode {
                // `ManuallyDrop` everywhere: the pool controls when (if ever)
                // the link and the data are dropped.
                next: ManuallyDrop::new(AtomicPtr::null()),
                data: ManuallyDrop::new(initial_value),
            },
        }
    }
}
333
#[cfg(test)]
mod tests {
    use core::sync::atomic::{self, AtomicUsize};
    use std::ptr::addr_of_mut;

    use super::*;

    // Note: each test invokes `object_pool!` locally, so every test gets its
    // own pool type and its own static storage — tests cannot interfere with
    // each other.

    #[test]
    fn cannot_request_if_empty() {
        object_pool!(MyObjectPool: i32);

        // No blocks were `manage`d, so the pool must be empty.
        assert_eq!(None, MyObjectPool.request());
    }

    #[test]
    fn can_request_if_manages_one_block() {
        object_pool!(MyObjectPool: i32);

        // Obtain a `&'static mut` to a function-local `static mut` via a raw
        // pointer (`addr_of_mut!`) to avoid creating a direct reference to
        // the mutable static.
        let block = unsafe {
            static mut BLOCK: ObjectBlock<i32> = ObjectBlock::new(1);
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyObjectPool.manage(block);

        assert_eq!(1, *MyObjectPool.request().unwrap());
    }

    #[test]
    fn request_drop_request() {
        object_pool!(MyObjectPool: i32);

        let block = unsafe {
            static mut BLOCK: ObjectBlock<i32> = ObjectBlock::new(1);
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyObjectPool.manage(block);

        let mut object = MyObjectPool.request().unwrap();

        // Mutate through `DerefMut`, return the object to the pool, and
        // verify the next `request` observes the mutated value (the value is
        // reused as-is, not reset).
        *object = 2;
        drop(object);

        assert_eq!(2, *MyObjectPool.request().unwrap());
    }

    #[test]
    fn destructor_does_not_run_on_drop() {
        static COUNT: AtomicUsize = AtomicUsize::new(0);

        pub struct MyStruct;

        impl Drop for MyStruct {
            fn drop(&mut self) {
                COUNT.fetch_add(1, atomic::Ordering::Relaxed);
            }
        }

        object_pool!(MyObjectPool: MyStruct);

        let block = unsafe {
            static mut BLOCK: ObjectBlock<MyStruct> = ObjectBlock::new(MyStruct);
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyObjectPool.manage(block);

        let object = MyObjectPool.request().unwrap();

        assert_eq!(0, COUNT.load(atomic::Ordering::Relaxed));

        // Dropping an `Object` returns it to the pool; the pooled value's
        // destructor must NOT run.
        drop(object);

        assert_eq!(0, COUNT.load(atomic::Ordering::Relaxed));
    }

    #[test]
    fn zst_is_well_aligned() {
        // Even for a zero-sized type, the address handed out must respect
        // the type's (over-)alignment.
        #[repr(align(4096))]
        pub struct Zst4096;

        object_pool!(MyObjectPool: Zst4096);

        let block = unsafe {
            static mut BLOCK: ObjectBlock<Zst4096> = ObjectBlock::new(Zst4096);
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyObjectPool.manage(block);

        let object = MyObjectPool.request().unwrap();

        let raw = &*object as *const Zst4096;
        assert_eq!(0, raw as usize % 4096);
    }
}