//! A `Box`-like API backed by a lock-free memory pool.

use core::{
    fmt,
    hash::{Hash, Hasher},
    mem::{ManuallyDrop, MaybeUninit},
    ops, ptr,
};

use stable_deref_trait::StableDeref;

use super::treiber::{NonNullPtr, Stack, UnionNode};

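/// Creates a new `BoxPool` singleton with the given `$name` that manages the
/// specified `$data_type`.
///
/// Usage sketch (the same pattern the tests at the bottom of this file use;
/// the names are illustrative):
///
/// ```ignore
/// box_pool!(MyBoxPool: i32);
///
/// // Donate one block of backing storage to the pool.
/// let block = unsafe {
///     static mut BLOCK: BoxBlock<i32> = BoxBlock::new();
///     core::ptr::addr_of_mut!(BLOCK).as_mut().unwrap()
/// };
/// MyBoxPool.manage(block);
///
/// // Allocation now succeeds; the block returns to the pool on drop.
/// let boxed = MyBoxPool.alloc(42).unwrap();
/// assert_eq!(42, *boxed);
/// ```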
#[macro_export]
macro_rules! box_pool {
    ($name:ident: $data_type:ty) => {
        pub struct $name;

        impl $crate::pool::boxed::BoxPool for $name {
            type Data = $data_type;

            fn singleton() -> &'static $crate::pool::boxed::BoxPoolImpl<$data_type> {
                #[allow(non_upper_case_globals)]
                static $name: $crate::pool::boxed::BoxPoolImpl<$data_type> =
                    $crate::pool::boxed::BoxPoolImpl::new();

                &$name
            }
        }

        impl $name {
            #[allow(dead_code)]
            pub fn alloc(
                &self,
                value: $data_type,
            ) -> Result<$crate::pool::boxed::Box<$name>, $data_type> {
                <$name as $crate::pool::boxed::BoxPool>::alloc(value)
            }

            #[allow(dead_code)]
            pub fn manage(&self, block: &'static mut $crate::pool::boxed::BoxBlock<$data_type>) {
                <$name as $crate::pool::boxed::BoxPool>::manage(block)
            }
        }
    };
}

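/// A singleton that manages `pool::boxed::Box`-es.
///
/// Use the `box_pool!` macro to create an implementation rather than
/// implementing this trait by hand.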
pub trait BoxPool: Sized {
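    /// The data type managed by the memory pool.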
    type Data: 'static;

    #[doc(hidden)]
    fn singleton() -> &'static BoxPoolImpl<Self::Data>;

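    /// Allocates a new `Box`, returning `Err(value)` if the pool has no free
    /// blocks left.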
    fn alloc(value: Self::Data) -> Result<Box<Self>, Self::Data> {
        Ok(Box {
            node_ptr: Self::singleton().alloc(value)?,
        })
    }

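    /// Adds a statically allocated block of memory to the pool.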
    fn manage(block: &'static mut BoxBlock<Self::Data>) {
        Self::singleton().manage(block);
    }
}

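/// A pool-backed pointer type that owns its value, like `alloc::boxed::Box`,
/// and returns its memory block to the pool when dropped.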
pub struct Box<P>
where
    P: BoxPool,
{
    node_ptr: NonNullPtr<UnionNode<MaybeUninit<P::Data>>>,
}

impl<A> Clone for Box<A>
where
    A: BoxPool,
    A::Data: Clone,
{
    fn clone(&self) -> Self {
        // `A::Data` is not necessarily `Debug`, so convert the `Result` into
        // an `Option` before `expect`-ing. Panics if the pool is exhausted.
        A::alloc((**self).clone()).ok().expect("OOM")
    }
}

impl<A> fmt::Debug for Box<A>
where
    A: BoxPool,
    A::Data: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        A::Data::fmt(self, f)
    }
}

impl<P> ops::Deref for Box<P>
where
    P: BoxPool,
{
    type Target = P::Data;

    fn deref(&self) -> &Self::Target {
        unsafe { &*self.node_ptr.as_ptr().cast::<P::Data>() }
    }
}

impl<P> ops::DerefMut for Box<P>
where
    P: BoxPool,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.node_ptr.as_ptr().cast::<P::Data>() }
    }
}

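// SAFETY: `Box` always dereferences to the same pool slot; the pointee does
// not move for the lifetime of the `Box`.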
unsafe impl<P> StableDeref for Box<P> where P: BoxPool {}

impl<A> fmt::Display for Box<A>
where
    A: BoxPool,
    A::Data: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        A::Data::fmt(self, f)
    }
}

impl<P> Drop for Box<P>
where
    P: BoxPool,
{
    fn drop(&mut self) {
        let node = self.node_ptr;

        // Run the inner value's destructor in place...
        unsafe { ptr::drop_in_place(node.as_ptr().cast::<P::Data>()) }

        // ...then return the now-uninitialized node to the pool's free list.
        unsafe { P::singleton().stack.push(node) }
    }
}

impl<A> Eq for Box<A>
where
    A: BoxPool,
    A::Data: Eq,
{
}

impl<A> Hash for Box<A>
where
    A: BoxPool,
    A::Data: Hash,
{
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        (**self).hash(state);
    }
}

impl<A> Ord for Box<A>
where
    A: BoxPool,
    A::Data: Ord,
{
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        A::Data::cmp(self, other)
    }
}

impl<A, B> PartialEq<Box<B>> for Box<A>
where
    A: BoxPool,
    B: BoxPool,
    A::Data: PartialEq<B::Data>,
{
    fn eq(&self, other: &Box<B>) -> bool {
        A::Data::eq(self, other)
    }
}

impl<A, B> PartialOrd<Box<B>> for Box<A>
where
    A: BoxPool,
    B: BoxPool,
    A::Data: PartialOrd<B::Data>,
{
    fn partial_cmp(&self, other: &Box<B>) -> Option<core::cmp::Ordering> {
        A::Data::partial_cmp(self, other)
    }
}

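// SAFETY: `Box<P>` uniquely owns its `P::Data`, so it can be sent or shared
// across threads exactly when `P::Data` can.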
unsafe impl<P> Send for Box<P>
where
    P: BoxPool,
    P::Data: Send,
{
}

unsafe impl<P> Sync for Box<P>
where
    P: BoxPool,
    P::Data: Sync,
{
}

#[doc(hidden)]
pub struct BoxPoolImpl<T> {
    stack: Stack<UnionNode<MaybeUninit<T>>>,
}

impl<T> BoxPoolImpl<T> {
    #[allow(clippy::new_without_default)]
    pub const fn new() -> Self {
        Self {
            stack: Stack::new(),
        }
    }

    fn alloc(&self, value: T) -> Result<NonNullPtr<UnionNode<MaybeUninit<T>>>, T> {
        if let Some(node_ptr) = self.stack.try_pop() {
            // Initialize the freshly popped free node with `value`.
            unsafe { node_ptr.as_ptr().cast::<T>().write(value) }

            Ok(node_ptr)
        } else {
            // The pool is exhausted; hand the value back to the caller.
            Err(value)
        }
    }

    fn manage(&self, block: &'static mut BoxBlock<T>) {
        let node: &'static mut _ = &mut block.node;

        unsafe { self.stack.push(NonNullPtr::from_static_mut_ref(node)) }
    }
}

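// The free list is a lock-free (Treiber) stack, so the pool can be shared
// between threads.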
unsafe impl<T> Sync for BoxPoolImpl<T> {}

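/// A chunk of memory that a `BoxPool` singleton can manage.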
pub struct BoxBlock<T> {
    node: UnionNode<MaybeUninit<T>>,
}

impl<T> BoxBlock<T> {
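    /// Creates a new block that is initially not managed by any pool.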
    pub const fn new() -> Self {
        Self {
            node: UnionNode {
                data: ManuallyDrop::new(MaybeUninit::uninit()),
            },
        }
    }
}

impl<T> Default for BoxBlock<T> {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
    use std::ptr::addr_of_mut;
    use std::thread;

    use super::*;

    #[test]
    fn cannot_alloc_if_empty() {
        box_pool!(MyBoxPool: i32);

        assert_eq!(Err(42), MyBoxPool.alloc(42));
    }

    #[test]
    fn can_alloc_if_pool_manages_one_block() {
        box_pool!(MyBoxPool: i32);

        let block = unsafe {
            static mut BLOCK: BoxBlock<i32> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyBoxPool.manage(block);

        assert_eq!(42, *MyBoxPool.alloc(42).unwrap());
    }

    #[test]
    fn alloc_drop_alloc() {
        box_pool!(MyBoxPool: i32);

        let block = unsafe {
            static mut BLOCK: BoxBlock<i32> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyBoxPool.manage(block);

        let boxed = MyBoxPool.alloc(1).unwrap();

        drop(boxed);

        assert_eq!(2, *MyBoxPool.alloc(2).unwrap());
    }

    #[test]
    fn runs_destructor_exactly_once_on_drop() {
        static COUNT: AtomicUsize = AtomicUsize::new(0);

        pub struct MyStruct;

        impl Drop for MyStruct {
            fn drop(&mut self) {
                COUNT.fetch_add(1, Ordering::Relaxed);
            }
        }

        box_pool!(MyBoxPool: MyStruct);

        let block = unsafe {
            static mut BLOCK: BoxBlock<MyStruct> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyBoxPool.manage(block);

        let boxed = MyBoxPool.alloc(MyStruct).ok().unwrap();

        assert_eq!(0, COUNT.load(Ordering::Relaxed));

        drop(boxed);

        assert_eq!(1, COUNT.load(Ordering::Relaxed));
    }

    #[test]
    fn zst_is_well_aligned() {
        #[repr(align(4096))]
        pub struct Zst4096;

        box_pool!(MyBoxPool: Zst4096);

        let block = unsafe {
            static mut BLOCK: BoxBlock<Zst4096> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        };
        MyBoxPool.manage(block);

        let boxed = MyBoxPool.alloc(Zst4096).ok().unwrap();

        let raw = &*boxed as *const Zst4096;
        assert_eq!(0, raw as usize % 4096);
    }

    #[test]
    fn can_clone_if_pool_is_not_exhausted() {
        static STRUCT_CLONE_WAS_CALLED: AtomicBool = AtomicBool::new(false);

        pub struct MyStruct;

        impl Clone for MyStruct {
            fn clone(&self) -> Self {
                STRUCT_CLONE_WAS_CALLED.store(true, Ordering::Relaxed);
                Self
            }
        }

        box_pool!(MyBoxPool: MyStruct);

        MyBoxPool.manage(unsafe {
            static mut BLOCK: BoxBlock<MyStruct> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        });
        MyBoxPool.manage(unsafe {
            static mut BLOCK: BoxBlock<MyStruct> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        });

        let first = MyBoxPool.alloc(MyStruct).ok().unwrap();
        let _second = first.clone();

        assert!(STRUCT_CLONE_WAS_CALLED.load(Ordering::Relaxed));

        let is_oom = MyBoxPool.alloc(MyStruct).is_err();
        assert!(is_oom);
    }

    #[test]
    fn clone_panics_if_pool_exhausted() {
        static STRUCT_CLONE_WAS_CALLED: AtomicBool = AtomicBool::new(false);

        pub struct MyStruct;

        impl Clone for MyStruct {
            fn clone(&self) -> Self {
                STRUCT_CLONE_WAS_CALLED.store(true, Ordering::Relaxed);
                Self
            }
        }

        box_pool!(MyBoxPool: MyStruct);

        MyBoxPool.manage(unsafe {
            static mut BLOCK: BoxBlock<MyStruct> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        });

        let first = MyBoxPool.alloc(MyStruct).ok().unwrap();

        let thread = thread::spawn(move || {
            let _second = first.clone();
        });

        // `clone` itself succeeds; the panic comes from the failed allocation.
        let thread_panicked = thread.join().is_err();
        assert!(thread_panicked);

        assert!(STRUCT_CLONE_WAS_CALLED.load(Ordering::Relaxed));
    }

    #[test]
    fn panicking_clone_does_not_leak_memory() {
        static STRUCT_CLONE_WAS_CALLED: AtomicBool = AtomicBool::new(false);

        pub struct MyStruct;

        impl Clone for MyStruct {
            fn clone(&self) -> Self {
                STRUCT_CLONE_WAS_CALLED.store(true, Ordering::Relaxed);
                panic!()
            }
        }

        box_pool!(MyBoxPool: MyStruct);

        MyBoxPool.manage(unsafe {
            static mut BLOCK: BoxBlock<MyStruct> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        });
        MyBoxPool.manage(unsafe {
            static mut BLOCK: BoxBlock<MyStruct> = BoxBlock::new();
            addr_of_mut!(BLOCK).as_mut().unwrap()
        });

        let boxed = MyBoxPool.alloc(MyStruct).ok().unwrap();

        let thread = thread::spawn(move || {
            let _boxed = boxed.clone();
        });

        let thread_panicked = thread.join().is_err();
        assert!(thread_panicked);

        assert!(STRUCT_CLONE_WAS_CALLED.load(Ordering::Relaxed));

        let once = MyBoxPool.alloc(MyStruct);
        let twice = MyBoxPool.alloc(MyStruct);

        assert!(once.is_ok());
        assert!(twice.is_ok());
    }
}