1use super::{check_fat_pointer, decompose_pointer, store_metadata, DstArray, DstBuf};
9#[cfg(doc)]
10use crate::MaybeUninit;
11use crate::{
12 ManuallyDrop, Mem, MemAligned,
13 _core::{marker, ptr},
14};
15
#[doc = crate::TAG_DATA_STRUCTURE!()]
/// A [`DstValue`] backed by an inline array of `CAP` `usize` words.
pub type DstValueUsize<DST , const CAP: usize> = DstValue<DST, DstArray<usize, CAP>>;
29
#[doc = crate::TAG_DATA_STRUCTURE!()]
/// A single DST value stored inline in a [`DstBuf`] buffer.
///
/// Layout: the value's bytes occupy the front of the buffer and the
/// fat-pointer metadata words occupy the trailing words (see `write_value`).
pub struct DstValue<DST: ?Sized, BUF: DstBuf> {
    // Marks logical ownership of a `DST` (variance / drop-check) without
    // storing one directly.
    _pd: marker::PhantomData<DST>,
    // Backing storage: payload bytes at the front, metadata at the tail.
    data: BUF,
}
48
49impl<DST: ?Sized, BUF: DstBuf> DstValue<DST, BUF> {
50 pub fn new<VAL, F>(val: VAL, get_ref: F) -> Result<DstValue<DST, BUF>, VAL>
65 where
66 F: FnOnce(&VAL) -> &DST,
67 (VAL, BUF::Inner): MemAligned,
68 BUF: Default,
69 {
70 Self::in_buffer(BUF::default(), val, get_ref)
71 }
72
73 pub fn in_buffer<VAL, F: FnOnce(&VAL) -> &DST>(
87 buffer: BUF,
88 val: VAL,
89 get_ref: F,
90 ) -> Result<DstValue<DST, BUF>, VAL>
91 where
92 (VAL, BUF::Inner): MemAligned,
93 {
94 <(VAL, BUF::Inner) as MemAligned>::assert_compatibility();
95
96 let rv = unsafe {
97 let ptr: *const _ = check_fat_pointer(&val, get_ref);
98 let (raw_ptr, meta_len, meta) = decompose_pointer(ptr);
99
100 DstValue::new_raw(&meta[..meta_len], raw_ptr.cast_mut(), size_of::<VAL>(), buffer)
101 };
102 match rv {
103 Some(r) => {
104 Mem::forget(val);
106 Ok(r)
107 }
108 None => Err(val),
109 }
110 }
111
112 pub unsafe fn new_raw(
117 info: &[usize],
118 data: *mut (),
119 size: usize,
120 mut buffer: BUF,
121 ) -> Option<DstValue<DST, BUF>> {
122 let req_words = BUF::round_to_words(size_of_val(info)) + BUF::round_to_words(size);
123 if buffer.extend(req_words).is_err() {
124 return None;
125 }
126
127 let mut rv =
128 ManuallyDrop::new(DstValue::<DST, BUF> { _pd: marker::PhantomData, data: buffer });
129 unsafe {
131 rv.write_value(data, size, info);
132 }
133 Some(ManuallyDrop::into_inner(rv))
134 }
135
136 pub fn replace<VAL>(&mut self, val: VAL, get_ref: impl Fn(&VAL) -> &DST) -> Result<(), VAL>
148 where
149 (VAL, BUF::Inner): MemAligned,
150 {
151 <(VAL, BUF::Inner) as MemAligned>::assert_compatibility();
152
153 let size = size_of::<VAL>();
154 let (raw_ptr, meta_len, meta) = decompose_pointer(check_fat_pointer(&val, get_ref));
155 let info = &meta[..meta_len];
156
157 let req_words = BUF::round_to_words(size_of_val(info)) + BUF::round_to_words(size);
159 if self.data.extend(req_words).is_err() {
160 return Err(val);
161 }
162 unsafe {
164 ptr::drop_in_place::<DST>(&mut **self);
165 self.write_value(raw_ptr, size_of::<VAL>(), info);
166 }
167 Ok(())
168 }
169}
170
impl<BUF: DstBuf> DstValue<str, BUF> {
    /// Creates an empty string in a default-constructed buffer.
    pub fn empty_str() -> Result<Self, ()>
    where
        BUF: Default,
    {
        Self::empty_str_in_buffer(Default::default())
    }

    /// Creates an empty string in the given buffer.
    pub fn empty_str_in_buffer(buffer: BUF) -> Result<Self, ()> {
        let rv = unsafe {
            // Metadata of the empty string literal (length 0); no payload bytes.
            let (raw_ptr, meta_len, meta) = decompose_pointer("");

            DstValue::new_raw(&meta[..meta_len], raw_ptr.cast_mut(), 0, buffer)
        };
        match rv {
            Some(r) => Ok(r),
            None => Err(()),
        }
    }

    /// Copies the string `v` into a default-constructed buffer.
    ///
    /// Returns `Err(v)` if the buffer cannot hold it.
    pub fn new_str(v: &str) -> Result<Self, &str>
    where
        BUF: Default,
    {
        Self::new_str_in_buffer(Default::default(), v)
    }

    /// Copies the string `val` into the given buffer.
    ///
    /// Returns `Err(val)` if the buffer cannot hold it.
    pub fn new_str_in_buffer(buffer: BUF, val: &str) -> Result<Self, &str> {
        let rv = unsafe {
            let (raw_ptr, meta_len, meta) = decompose_pointer(val);

            DstValue::new_raw(&meta[..meta_len], raw_ptr.cast_mut(), size_of_val(val), buffer)
        };
        match rv {
            Some(r) => Ok(r),
            None => Err(val),
        }
    }

    /// Appends `val` to the stored string, growing the buffer if needed.
    ///
    /// Returns `Err(())` if the buffer cannot grow; the contents are unchanged.
    pub fn append_str(&mut self, val: &str) -> Result<(), ()> {
        let info_words = BUF::round_to_words(size_of::<usize>());

        let ofs = self.len();

        // Total words required: current bytes + appended bytes + metadata.
        let req_words = BUF::round_to_words(ofs + val.len()) + info_words;
        if self.data.extend(req_words).is_err() {
            return Err(());
        }

        let data = self.data.as_mut();
        let info_ofs = data.len() - info_words;

        unsafe {
            // Copy the new bytes just past the current contents, then bump
            // the stored length metadata.
            ptr::copy_nonoverlapping(
                val.as_ptr(),
                (data.as_mut_ptr() as *mut u8).add(ofs),
                val.len(),
            );
            store_metadata(&mut data[info_ofs..], &[ofs + val.len()]);
        }

        Ok(())
    }

    /// Shortens the string to `len` bytes; no-op when `len >= self.len()`.
    ///
    /// Panics if `len` is not on a UTF-8 character boundary (enforced by the
    /// slice expression below).
    pub fn truncate(&mut self, len: usize) {
        if len < self.len() {
            // Char-boundary check: slicing a `str` at a non-boundary panics.
            let _ = &self[..][len..];
            let info_words = BUF::round_to_words(size_of::<usize>());
            let data = self.data.as_mut();
            let info_ofs = data.len() - info_words;
            // Only the length metadata changes; truncated bytes stay in place.
            store_metadata(&mut data[info_ofs..], &[len]);
        }
    }
}
286
287impl<I, BUF: DstBuf> DstValue<[I], BUF>
289where
290 (I, BUF::Inner): MemAligned,
291{
292 pub fn empty_slice() -> Result<Self, ()>
297 where
298 BUF: Default,
299 {
300 Self::empty_slice_with_buffer(Default::default())
301 }
302 pub fn empty_slice_with_buffer(mut buffer: BUF) -> Result<Self, ()> {
307 <(I, BUF::Inner) as MemAligned>::assert_compatibility();
308
309 let info_words = BUF::round_to_words(size_of::<usize>());
310 let req_words = info_words;
311 if buffer.extend(req_words).is_err() {
312 return Err(());
313 }
314 assert!(req_words <= buffer.as_ref().len());
315
316 let mut rv = DstValue { _pd: marker::PhantomData, data: buffer };
317
318 let data = rv.data.as_mut();
319 let info_ofs = data.len() - info_words;
320 let (_data_dst, info_dst) = data.split_at_mut(info_ofs);
321
322 store_metadata(info_dst, &[0]);
323 Ok(rv)
324 }
325
326 pub fn append(&mut self, v: I) -> Result<(), I> {
328 let info_words = BUF::round_to_words(size_of::<usize>());
329
330 let ofs = self.len();
331
332 let req_words = BUF::round_to_words((ofs + 1) * size_of::<I>()) + info_words;
334 if self.data.extend(req_words).is_err() {
335 return Err(v);
336 }
337 let data = self.data.as_mut();
338 assert!(req_words <= data.len());
339 unsafe {
342 let data_ptr = (data.as_ptr() as *mut I).add(ofs);
343 ptr::write(data_ptr, v);
344 }
345 let info_ofs = data.len() - info_words;
347 store_metadata(&mut data[info_ofs..], &[ofs + 1]);
348
349 Ok(())
350 }
351
352 pub fn appended(mut self, v: I) -> Result<Self, (Self, I)> {
354 match self.append(v) {
355 Ok(()) => Ok(self),
356 Err(v) => Err((self, v)),
357 }
358 }
359
360 pub fn extend<It: Iterator<Item = I>>(&mut self, mut iter: It) -> Result<(), (I, It)> {
362 while let Some(v) = iter.next() {
363 match self.append(v) {
364 Ok(()) => {}
365 Err(v) => return Err((v, iter)),
366 }
367 }
368 Ok(())
369 }
370 pub fn extended<It: Iterator<Item = I>>(mut self, iter: It) -> Result<Self, (Self, I, It)> {
372 match self.extend(iter) {
373 Ok(()) => Ok(self),
374 Err((v, iter)) => Err((self, v, iter)),
375 }
376 }
377
378 pub fn pop(&mut self) -> Option<I> {
380 if !self.is_empty() {
381 let ofs = self.len() - 1;
382 let data = self.data.as_mut();
383 let info_words = BUF::round_to_words(size_of::<usize>());
384 let info_ofs = data.len() - info_words;
385 unsafe {
386 store_metadata(&mut data[info_ofs..], &[ofs]);
387 Some(ptr::read((data.as_ptr() as *const I).add(ofs)))
388 }
389 } else {
390 None
391 }
392 }
393}
394
impl<DST: ?Sized, BUF: DstBuf> DstValue<DST, BUF> {
    /// Writes the value's bytes to the front of the buffer and its metadata
    /// words to the trailing words.
    ///
    /// # Safety
    /// `data` must point to `size` readable bytes, and the buffer must have
    /// been extended to hold them (asserted below).
    unsafe fn write_value(&mut self, data: *const (), size: usize, info: &[usize]) {
        let info_words = BUF::round_to_words(size_of_val(info));
        let req_words = info_words + BUF::round_to_words(size);
        let buf = self.data.as_mut();
        assert!(req_words <= buf.len());

        {
            // Metadata lives in the trailing words of the buffer.
            let info_ofs = buf.len() - info_words;
            let info_dst = &mut buf[info_ofs..];
            store_metadata(info_dst, info);
        }

        unsafe {
            // Payload bytes go at the start of the buffer.
            ptr::copy_nonoverlapping(data as *const u8, buf.as_mut_ptr() as *mut u8, size);
        }
    }

    /// Reconstructs the fat pointer to the stored value.
    ///
    /// # Safety
    /// The buffer must contain an initialized value with valid trailing
    /// metadata (upheld by the constructors).
    unsafe fn as_ptr(&self) -> *mut DST {
        let data = self.data.as_ref();
        // Number of metadata words: fat-pointer words minus the data word
        // (zero for thin pointers).
        let info_size = size_of::<*mut DST>() / size_of::<usize>() - 1;
        let info_ofs = data.len() - BUF::round_to_words(info_size * size_of::<usize>());
        let (data, meta) = data.split_at(info_ofs);
        unsafe { super::make_fat_ptr(data.as_ptr() as *mut (), meta) }
    }

    /// Mutable counterpart of [`as_ptr`](Self::as_ptr).
    ///
    /// # Safety
    /// Same requirements as `as_ptr`.
    unsafe fn as_ptr_mut(&mut self) -> *mut DST {
        let data = self.data.as_mut();
        let info_size = size_of::<*mut DST>() / size_of::<usize>() - 1;
        let info_ofs = data.len() - BUF::round_to_words(info_size * size_of::<usize>());
        let (data, meta) = data.split_at_mut(info_ofs);
        unsafe { super::make_fat_ptr(data.as_mut_ptr() as *mut (), meta) }
    }
}
438
/// Core-library trait implementations for [`DstValue`].
mod core_impls {
    use super::{DstBuf, DstValue};
    use core::{fmt, future, iter, ops, pin, ptr, task};

    impl<DST: ?Sized, BUF: DstBuf> ops::Deref for DstValue<DST, BUF> {
        type Target = DST;
        #[must_use]
        fn deref(&self) -> &DST {
            // SAFETY: the buffer always holds an initialized value
            // (upheld by the constructors).
            unsafe { &*self.as_ptr() }
        }
    }
    impl<DST: ?Sized, BUF: DstBuf> ops::DerefMut for DstValue<DST, BUF> {
        #[must_use]
        fn deref_mut(&mut self) -> &mut DST {
            // SAFETY: same invariant as `deref`.
            unsafe { &mut *self.as_ptr_mut() }
        }
    }
    impl<DST: ?Sized, BUF: DstBuf> ops::Drop for DstValue<DST, BUF> {
        fn drop(&mut self) {
            // SAFETY: drops the stored value exactly once, in place.
            unsafe { ptr::drop_in_place(&mut **self) }
        }
    }

    // Forwards trait `$t` to the stored `DST` when it implements `$t`.
    macro_rules! impl_trait {
        ( $t:path; $($body:tt)* ) => {
            impl<BUF: DstBuf, DST: ?Sized> $t for DstValue<DST, BUF> where DST: $t { $( $body )* }
        }
    }

    impl_trait! { future::Future;
        type Output = DST::Output;
        fn poll(self: pin::Pin<&mut Self>, cx: &mut task::Context) -> task::Poll<Self::Output> {
            // Projects the pin through to the stored DST and polls it.
            // NOTE(review): assumes the stored value is structurally pinned
            // (never moved out while pinned) — confirm against the crate's
            // pinning contract.
            unsafe { pin::Pin::new_unchecked(&mut **self.get_unchecked_mut()).poll(cx) }
        }
    }
    impl_trait! { iter::Iterator;
        type Item = DST::Item;
        #[must_use]
        fn next(&mut self) -> Option<Self::Item> {
            (**self).next()
        }
    }
    impl_trait! { iter::DoubleEndedIterator;
        #[must_use]
        fn next_back(&mut self) -> Option<Self::Item> {
            (**self).next_back()
        }
    }
    impl_trait! { iter::ExactSizeIterator; }

    // Forwards each listed `fmt` trait to the stored `DST`.
    macro_rules! impl_fmt {
        ( $( $t:ident )* ) => {
            $(
                impl_trait!{ fmt::$t;
                    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                        (**self).fmt(f)
                    }
                }
            )*
        }
    }
    impl_fmt! {
        Display Debug UpperHex LowerHex
    }
}