devela/data/id/pin/
box.rs

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
// devela::data::id::pin::box
//
//! Pinned memory-based unique IDs.
//

use crate::{addr_of, Box, Pin};

/// A unique identifier based on a pinned heap-allocated memory address.
///
/// `IdPinBox` generates a unique ID by pinning a value in heap memory,
/// ensuring that the ID remains stable and unique based on the memory address.
// NOTE: the derived `Clone` clones the inner `Pin<Box<u8>>`, which allocates a
// *new* pinned byte — so a clone gets a distinct ID (see `test_clone_eq`).
#[derive(Clone)]
pub struct IdPinBox {
    // The pinned heap allocation; its *address* (never the `u8` value, which
    // is always 0) is what constitutes the ID.
    inner: Pin<Box<u8>>,
}

impl IdPinBox {
    /// Creates a new `IdPinBox` with a unique memory address.
    ///
    /// Equivalent to calling [`IdPinBox::default`].
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns the unique ID as a `usize`, derived from the memory address.
    #[must_use]
    pub fn as_usize(&self) -> usize {
        // The heap allocation is pinned, so its address is stable for the
        // lifetime of `self` and can safely serve as the ID value.
        addr_of!(*self.inner) as usize
    }
}

mod impl_traits {
    use crate::{Box, Debug, FmtResult, Formatter, Hash, Hasher, IdPinBox, Ordering, Ptr};

    impl Default for IdPinBox {
        /// Allocates a fresh pinned byte; its heap address becomes the ID.
        fn default() -> Self {
            Self { inner: Box::pin(0) }
        }
    }

    impl Debug for IdPinBox {
        /// Formats the ID as its numeric address value.
        fn fmt(&self, f: &mut Formatter) -> FmtResult<()> {
            write!(f, "{}", self.as_usize())
        }
    }
    impl Hash for IdPinBox {
        /// Hashes the address value, consistent with `PartialEq`
        /// (pointer equality holds iff `as_usize` values are equal).
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.as_usize().hash(state);
        }
    }

    impl PartialEq for IdPinBox {
        /// Two IDs are equal iff they refer to the same pinned allocation.
        fn eq(&self, other: &Self) -> bool {
            Ptr::eq(&*self.inner, &*other.inner)
        }
    }
    impl Eq for IdPinBox {}

    impl PartialOrd for IdPinBox {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            // Canonical form: delegate to `Ord` so the two impls cannot
            // diverge (clippy: `non_canonical_partial_ord_impl`).
            Some(self.cmp(other))
        }
    }
    impl Ord for IdPinBox {
        /// Orders IDs by their numeric address value.
        fn cmp(&self, other: &Self) -> Ordering {
            self.as_usize().cmp(&other.as_usize())
        }
    }
}

#[cfg(all(test, feature = "alloc"))]
mod test {
    use crate::_dep::_alloc::{sync::Arc, vec};

    use super::IdPinBox;

    // Minimal type embedding an `IdPinBox` as its identity.
    #[derive(Clone, Eq, PartialEq, Debug)]
    struct Test {
        id: IdPinBox,
    }

    // Shares one `Test` (and therefore one ID) across clones via `Arc`.
    struct TestWrapper {
        inner: Arc<Test>,
    }

    impl Clone for TestWrapper {
        // Clones only the `Arc` handle, not the underlying `Test`,
        // so all clones keep the same ID.
        fn clone(&self) -> Self {
            Self { inner: self.inner.clone() }
        }
    }

    impl TestWrapper {
        fn new() -> Self {
            Self { inner: Test { id: <_>::default() }.into() }
        }
        fn id(&self) -> usize {
            self.inner.id.as_usize()
        }
    }

    // Guard: this test is meaningful only for debug builds. In release mode
    // the generator below could be inlined, invalidating the check, so the
    // non-debug variant fails loudly instead of passing vacuously.
    #[cfg(not(debug_assertions))]
    #[test]
    fn test_stack() {
        panic!(
            "the test MUST be run for the debug target,
as there is still a chance the object generator may be inlined"
        );
    }
    // The ID must survive moving the owning value across stack frames:
    // `generate` returns `t` by value, yet the address-based ID is unchanged
    // because the ID points at the pinned heap allocation, not at `t` itself.
    #[cfg(debug_assertions)]
    #[test]
    fn test_stack() {
        #[inline(never)]
        fn generate() -> (Test, usize) {
            let t = Test { id: <_>::default() };
            let n = t.id.as_usize();
            (t, n)
        }
        let (t, n) = generate();
        assert_eq!(t.id.as_usize(), n);
    }
    // Cloning allocates a new pinned byte, so a clone must get a distinct ID,
    // while a value always compares equal to itself.
    #[test]
    fn test_clone_eq() {
        let t = Test { id: <_>::default() };
        let t2 = t.clone();
        assert_ne!(t.id, t2.id);
        assert_ne!(t.id.as_usize(), t2.id.as_usize());
        assert_ne!(t, t2);
        assert_eq!(t, t);
        assert_eq!(t.id, t.id);
    }
    // Moving the owning value into and out of a `Vec` (heap movement of the
    // container) must not change the ID of the pinned allocation.
    #[test]
    fn test_heap_movement() {
        let t = Test { id: <_>::default() };
        let n = t.id.as_usize();
        let mut x = vec![t];
        assert_eq!(x[0].id.as_usize(), n);
        let t_back = x.pop().unwrap();
        assert_eq!(t_back.id.as_usize(), n);
    }
    // `Arc`-wrapped clones share the same underlying `Test`, so their IDs match.
    #[test]
    fn test_arc_covered() {
        let t1 = TestWrapper::new();
        let t2 = t1.clone();
        assert_eq!(t1.id(), t2.id());
    }
}