devela/data/uid/pin/
box.rs

1// devela::data::uid::pin::box
2//
3//! Pinned memory-based unique IDs.
4//
5
6use crate::{addr_of, Box, Pin};
7
/// A unique identifier based on a pinned heap-allocated memory address.
///
/// `IdPinBox` generates a unique ID by pinning a value in heap memory,
/// ensuring that the ID remains stable and unique based on the memory address.
///
/// Note that `Clone` (derived) clones the pinned `Box`, which allocates a
/// *new* pinned byte — a clone therefore gets a fresh, distinct ID rather
/// than sharing the original's address (see `test_clone_eq`).
///
/// See also [`IdPin`][crate::IdPin].
///
#[doc = crate::doc_!(vendor: "object-id")]
#[derive(Clone)]
pub struct IdPinBox {
    // The pinned heap allocation; its address *is* the ID. The stored `u8`
    // value (always 0) is irrelevant — only the address matters.
    inner: Pin<Box<u8>>,
}
20
21impl IdPinBox {
22    /// Creates a new `IdPinBox` with a unique memory address.
23    pub fn new() -> Self {
24        Self::default()
25    }
26
27    /// Returns the unique ID as a `usize`, derived from the memory address.
28    pub fn as_usize(&self) -> usize {
29        addr_of!(*self.inner) as usize
30    }
31}
32
33mod impl_traits {
34    use crate::{Box, Debug, FmtResult, Formatter, Hash, Hasher, IdPinBox, Ordering, Ptr};
35
36    impl Default for IdPinBox {
37        fn default() -> Self {
38            Self { inner: Box::pin(0) }
39        }
40    }
41
42    impl Debug for IdPinBox {
43        fn fmt(&self, f: &mut Formatter) -> FmtResult<()> {
44            write!(f, "{}", self.as_usize())
45        }
46    }
47    impl Hash for IdPinBox {
48        fn hash<H: Hasher>(&self, state: &mut H) {
49            self.as_usize().hash(state);
50        }
51    }
52
53    impl PartialEq for IdPinBox {
54        fn eq(&self, other: &Self) -> bool {
55            Ptr::eq(&*self.inner, &*other.inner)
56        }
57    }
58    impl Eq for IdPinBox {}
59
60    impl PartialOrd for IdPinBox {
61        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
62            Some(self.as_usize().cmp(&other.as_usize()))
63        }
64    }
65    impl Ord for IdPinBox {
66        fn cmp(&self, other: &Self) -> Ordering {
67            self.as_usize().cmp(&other.as_usize())
68        }
69    }
70}
71
#[cfg(all(test, feature = "alloc"))]
mod test {
    use crate::_dep::_alloc::{sync::Arc, vec};

    use super::IdPinBox;

    // Value type carrying an id. Deriving `Clone` clones the `IdPinBox`,
    // which allocates a new pin — so a cloned `Test` gets a *different* id.
    #[derive(Clone, Eq, PartialEq, Debug)]
    struct Test {
        id: IdPinBox,
    }

    // Shares one `Test` behind an `Arc`, so clones report the *same* id.
    struct TestWrapper {
        inner: Arc<Test>,
    }

    impl Clone for TestWrapper {
        fn clone(&self) -> Self {
            // Clones only the `Arc` handle — the inner `Test` (and its id)
            // is shared, not duplicated.
            Self { inner: self.inner.clone() }
        }
    }

    impl TestWrapper {
        fn new() -> Self {
            Self { inner: Test { id: <_>::default() }.into() }
        }
        // The shared id, as seen through this handle.
        fn id(&self) -> usize {
            self.inner.id.as_usize()
        }
    }

    // Guard: in release builds `generate` below may be inlined, which would
    // invalidate the stack-stability check — fail loudly instead of passing
    // vacuously.
    #[cfg(not(debug_assertions))]
    #[test]
    fn test_stack() {
        panic!(
            "the test MUST be run for the debug target,
as there is still a chance the object generator may be inlined"
        );
    }
    // The id must stay stable when the owning struct is moved on the stack
    // (returned by value across a function boundary).
    #[cfg(debug_assertions)]
    #[test]
    fn test_stack() {
        // `inline(never)` keeps construction in its own frame so the returned
        // `Test` is genuinely moved out of it.
        #[inline(never)]
        fn generate() -> (Test, usize) {
            let t = Test { id: <_>::default() };
            let n = t.id.as_usize();
            (t, n)
        }
        let (t, n) = generate();
        assert_eq!(t.id.as_usize(), n);
    }
    // Cloning allocates a new pin, so the clone's id (and thus the whole
    // `Test`) compares unequal; a value always equals itself.
    #[test]
    fn test_clone_eq() {
        let t = Test { id: <_>::default() };
        let t2 = t.clone();
        assert_ne!(t.id, t2.id);
        assert_ne!(t.id.as_usize(), t2.id.as_usize());
        assert_ne!(t, t2);
        assert_eq!(t, t);
        assert_eq!(t.id, t.id);
    }
    // Moving the owner into (and back out of) a `Vec` must not change the id:
    // only the owner moves, not the pinned allocation it points to.
    #[test]
    fn test_heap_movement() {
        let t = Test { id: <_>::default() };
        let n = t.id.as_usize();
        let mut x = vec![t];
        assert_eq!(x[0].id.as_usize(), n);
        let t_back = x.pop().unwrap();
        assert_eq!(t_back.id.as_usize(), n);
    }
    // `Arc`-shared owners report the same id from every cloned handle.
    #[test]
    fn test_arc_covered() {
        let t1 = TestWrapper::new();
        let t2 = t1.clone();
        assert_eq!(t1.id(), t2.id());
    }
}