use core::{
    cmp::Ordering,
    fmt,
    hash::{Hash, Hasher},
    mem::ManuallyDrop,
    ops, ptr,
};

use stable_deref_trait::StableDeref;

use super::treiber::{AtomicPtr, NonNullPtr, Stack, StructNode};

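/// Creates a pool singleton named `$name` that manages objects of type
/// `$data_type`. The generated unit struct exposes inherent `request` and
/// `manage` methods that forward to the `ObjectPool` trait.
///
/// A minimal usage sketch (not taken from the original docs; the pool name
/// `P` and the `static mut` block mirror this module's tests):
///
/// ```ignore
/// object_pool!(P: [u8; 128]);
///
/// // The pool starts empty: requests fail until memory is donated to it.
/// assert!(P.request().is_none());
///
/// let block: &'static mut ObjectBlock<[u8; 128]> = unsafe {
///     static mut B: ObjectBlock<[u8; 128]> = ObjectBlock::new([0; 128]);
///     &mut B
/// };
/// P.manage(block);
///
/// // Now a request succeeds; dropping the object returns it to the pool.
/// let object = P.request().unwrap();
/// ```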
#[macro_export]
macro_rules! object_pool {
    ($name:ident: $data_type:ty) => {
        pub struct $name;

        impl $crate::pool::object::ObjectPool for $name {
            type Data = $data_type;

            fn singleton() -> &'static $crate::pool::object::ObjectPoolImpl<$data_type> {
                static $name: $crate::pool::object::ObjectPoolImpl<$data_type> =
                    $crate::pool::object::ObjectPoolImpl::new();

                &$name
            }
        }

        impl $name {
            #[allow(dead_code)]
            pub fn request(&self) -> Option<$crate::pool::object::Object<$name>> {
                <$name as $crate::pool::object::ObjectPool>::request()
            }

            #[allow(dead_code)]
            pub fn manage(
                &self,
                block: &'static mut $crate::pool::object::ObjectBlock<$data_type>,
            ) {
                <$name as $crate::pool::object::ObjectPool>::manage(block)
            }
        }
    };
}

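/// A singleton pool of statically allocated objects. Implementations are
/// generated by the `object_pool!` macro; the pool owns no memory until
/// `ObjectBlock`s are donated to it via `manage`.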
pub trait ObjectPool: Sized {
    type Data: 'static;

    #[doc(hidden)]
    fn singleton() -> &'static ObjectPoolImpl<Self::Data>;

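    /// Requests a new object from the pool, returning `None` if the pool has
    /// no free blocks available.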
    fn request() -> Option<Object<Self>> {
        Self::singleton()
            .request()
            .map(|node_ptr| Object { node_ptr })
    }

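    /// Adds a statically allocated block to the pool, making it available to
    /// subsequent `request` calls.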
    fn manage(block: &'static mut ObjectBlock<Self::Data>) {
        Self::singleton().manage(block)
    }
}

#[doc(hidden)]
pub struct ObjectPoolImpl<T> {
    stack: Stack<StructNode<T>>,
}

impl<T> ObjectPoolImpl<T> {
    #[doc(hidden)]
    pub const fn new() -> Self {
        Self {
            stack: Stack::new(),
        }
    }

    fn request(&self) -> Option<NonNullPtr<StructNode<T>>> {
        self.stack.try_pop()
    }

    fn manage(&self, block: &'static mut ObjectBlock<T>) {
        let node: &'static mut _ = &mut block.node;

        unsafe { self.stack.push(NonNullPtr::from_static_mut_ref(node)) }
    }
}

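// SAFETY: the pool is only a hand-off point: threads sharing an
// `ObjectPoolImpl` exchange exclusive ownership of `T` values through the
// lock-free stack, so `T: Send` is sufficient; no `&T` is ever shared.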
unsafe impl<T> Sync for ObjectPoolImpl<T> where T: Send {}

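/// An object managed by an object pool. Dereferences to `P::Data`; dropping
/// it returns the underlying block to the pool instead of running the
/// destructor of `P::Data` (see `destructor_does_not_run_on_drop` below).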
pub struct Object<P>
where
    P: ObjectPool,
{
    node_ptr: NonNullPtr<StructNode<P::Data>>,
}

impl<A, T, const N: usize> AsMut<[T]> for Object<A>
where
    A: ObjectPool<Data = [T; N]>,
{
    fn as_mut(&mut self) -> &mut [T] {
        &mut **self
    }
}

impl<A, T, const N: usize> AsRef<[T]> for Object<A>
where
    A: ObjectPool<Data = [T; N]>,
{
    fn as_ref(&self) -> &[T] {
        &**self
    }
}

impl<A> fmt::Debug for Object<A>
where
    A: ObjectPool,
    A::Data: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        A::Data::fmt(self, f)
    }
}

impl<A> ops::Deref for Object<A>
where
    A: ObjectPool,
{
    type Target = A::Data;

    fn deref(&self) -> &Self::Target {
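        // `data` is a `ManuallyDrop<A::Data>`; `addr_of!` takes its address
        // without creating an intermediate reference to the whole node, and
        // the resulting `&ManuallyDrop<_>` deref-coerces to `&A::Data` at the
        // return site. `deref_mut` below works the same way.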
        unsafe { &*ptr::addr_of!((*self.node_ptr.as_ptr()).data) }
    }
}

impl<A> ops::DerefMut for Object<A>
where
    A: ObjectPool,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *ptr::addr_of_mut!((*self.node_ptr.as_ptr()).data) }
    }
}

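// SAFETY: the object's data lives inside a block with a stable (static)
// address; moving the `Object` handle never moves the data it points to.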
unsafe impl<A> StableDeref for Object<A> where A: ObjectPool {}

impl<A> fmt::Display for Object<A>
where
    A: ObjectPool,
    A::Data: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        A::Data::fmt(self, f)
    }
}

impl<P> Drop for Object<P>
where
    P: ObjectPool,
{
    fn drop(&mut self) {
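        // Return the node to the pool's free stack. `P::Data` is stored in a
        // `ManuallyDrop`, so its destructor intentionally does not run here.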
        unsafe { P::singleton().stack.push(self.node_ptr) }
    }
}

impl<A> Eq for Object<A>
where
    A: ObjectPool,
    A::Data: Eq,
{
}

impl<A> Hash for Object<A>
where
    A: ObjectPool,
    A::Data: Hash,
{
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        (**self).hash(state)
    }
}

impl<A> Ord for Object<A>
where
    A: ObjectPool,
    A::Data: Ord,
{
    fn cmp(&self, other: &Self) -> Ordering {
        A::Data::cmp(self, other)
    }
}

impl<A, B> PartialEq<Object<B>> for Object<A>
where
    A: ObjectPool,
    B: ObjectPool,
    A::Data: PartialEq<B::Data>,
{
    fn eq(&self, other: &Object<B>) -> bool {
        A::Data::eq(self, other)
    }
}

impl<A, B> PartialOrd<Object<B>> for Object<A>
where
    A: ObjectPool,
    B: ObjectPool,
    A::Data: PartialOrd<B::Data>,
{
    fn partial_cmp(&self, other: &Object<B>) -> Option<Ordering> {
        A::Data::partial_cmp(self, other)
    }
}

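// SAFETY: like `Box<P::Data>`, an `Object` uniquely owns its data, so sending
// the handle to another thread moves the data with it; the drop-time push
// onto the shared stack is lock-free and thread-safe.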
unsafe impl<P> Send for Object<P>
where
    P: ObjectPool,
    P::Data: Send,
{
}

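// SAFETY: a shared `&Object` only hands out `&P::Data` (via `Deref`), so
// sharing the handle is sound exactly when `P::Data: Sync`.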
unsafe impl<P> Sync for Object<P>
where
    P: ObjectPool,
    P::Data: Sync,
{
}

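/// A chunk of statically allocated memory that a pool can manage. Construct
/// it in a `static` and donate it with `manage` (see the tests below).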
pub struct ObjectBlock<T> {
    node: StructNode<T>,
}

impl<T> ObjectBlock<T> {
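    /// Creates a block holding `initial_value`. The function is `const` so
    /// the block can be placed in a `static`; both fields are wrapped in
    /// `ManuallyDrop` so that neither the link nor the data is ever dropped
    /// implicitly.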
    pub const fn new(initial_value: T) -> Self {
        Self {
            node: StructNode {
                next: ManuallyDrop::new(AtomicPtr::null()),
                data: ManuallyDrop::new(initial_value),
            },
        }
    }
}

#[cfg(test)]
mod tests {
    use core::sync::atomic::{self, AtomicUsize};

    use super::*;

    #[test]
    fn cannot_request_if_empty() {
        object_pool!(P: i32);

        assert_eq!(None, P.request());
    }

    #[test]
    fn can_request_if_manages_one_block() {
        object_pool!(P: i32);

        let block = unsafe {
            static mut B: ObjectBlock<i32> = ObjectBlock::new(1);
            &mut B
        };
        P.manage(block);

        assert_eq!(1, *P.request().unwrap());
    }

    #[test]
    fn request_drop_request() {
        object_pool!(P: i32);

        let block = unsafe {
            static mut B: ObjectBlock<i32> = ObjectBlock::new(1);
            &mut B
        };
        P.manage(block);

        let mut object = P.request().unwrap();

        *object = 2;
        drop(object);

        assert_eq!(2, *P.request().unwrap());
    }

    #[test]
    fn destructor_does_not_run_on_drop() {
        static COUNT: AtomicUsize = AtomicUsize::new(0);

        pub struct S;

        impl Drop for S {
            fn drop(&mut self) {
                COUNT.fetch_add(1, atomic::Ordering::Relaxed);
            }
        }

        object_pool!(P: S);

        let block = unsafe {
            static mut B: ObjectBlock<S> = ObjectBlock::new(S);
            &mut B
        };
        P.manage(block);

        let object = P.request().unwrap();

        assert_eq!(0, COUNT.load(atomic::Ordering::Relaxed));

        drop(object);

        assert_eq!(0, COUNT.load(atomic::Ordering::Relaxed));
    }

    #[test]
    fn zst_is_well_aligned() {
        #[repr(align(4096))]
        pub struct Zst4096;

        object_pool!(P: Zst4096);

        let block = unsafe {
            static mut B: ObjectBlock<Zst4096> = ObjectBlock::new(Zst4096);
            &mut B
        };
        P.manage(block);

        let object = P.request().unwrap();

        let raw = &*object as *const Zst4096;
        assert_eq!(0, raw as usize % 4096);
    }
}