// bytemuck/allocation.rs
1#![cfg(feature = "extern_crate_alloc")]
2#![allow(clippy::duplicated_attributes)]
3
4//! Stuff to boost things in the `alloc` crate.
5//!
6//! * You must enable the `extern_crate_alloc` feature of `bytemuck` or you will
7//! not be able to use this module! This is generally done by adding the
8//! feature to the dependency in Cargo.toml like so:
9//!
10//! `bytemuck = { version = "VERSION_YOU_ARE_USING", features =
11//! ["extern_crate_alloc"]}`
12
13use super::*;
14#[cfg(target_has_atomic = "ptr")]
15use alloc::sync::Arc;
16use alloc::{
17 alloc::{alloc_zeroed, Layout},
18 boxed::Box,
19 rc::Rc,
20 vec,
21 vec::Vec,
22};
23use core::{
24 mem::{size_of_val, ManuallyDrop},
25 ops::{Deref, DerefMut},
26};
27
28/// As [`try_cast_box`], but unwraps for you.
29#[inline]
30pub fn cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B> {
31 try_cast_box(input).map_err(|(e, _v)| e).unwrap()
32}
33
34/// Attempts to cast the content type of a [`Box`].
35///
36/// On failure you get back an error along with the starting `Box`.
37///
38/// ## Failure
39///
40/// * The start and end content type of the `Box` must have the exact same
41/// alignment.
42/// * The start and end size of the `Box` must have the exact same size.
43#[inline]
44pub fn try_cast_box<A: NoUninit, B: AnyBitPattern>(
45 input: Box<A>,
46) -> Result<Box<B>, (PodCastError, Box<A>)> {
47 if align_of::<A>() != align_of::<B>() {
48 Err((PodCastError::AlignmentMismatch, input))
49 } else if size_of::<A>() != size_of::<B>() {
50 Err((PodCastError::SizeMismatch, input))
51 } else {
52 // Note(Lokathor): This is much simpler than with the Vec casting!
53 let ptr: *mut B = Box::into_raw(input) as *mut B;
54 Ok(unsafe { Box::from_raw(ptr) })
55 }
56}
57
/// Allocates a `Box<T>` with all of the contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails.
#[inline]
pub fn try_zeroed_box<T: Zeroable>() -> Result<Box<T>, ()> {
  if size_of::<T>() == 0 {
    // This will not allocate but simply create an arbitrary non-null
    // aligned pointer, valid for Box for a zero-sized pointee.
    let ptr = core::ptr::NonNull::dangling().as_ptr();
    return Ok(unsafe { Box::from_raw(ptr) });
  }
  let layout = Layout::new::<T>();
  let ptr = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    // SAFETY: `ptr` is non-null, was allocated with exactly `T`'s layout,
    // and the all-zeroes bit pattern is a valid `T` because `T: Zeroable`.
    Ok(unsafe { Box::<T>::from_raw(ptr as *mut T) })
  }
}
85
86/// As [`try_zeroed_box`], but unwraps for you.
87#[inline]
88pub fn zeroed_box<T: Zeroable>() -> Box<T> {
89 try_zeroed_box().unwrap()
90}
91
92/// Allocates a `Vec<T>` of length and capacity exactly equal to `length` and
93/// all elements zeroed.
94///
95/// ## Failure
96///
97/// This fails if the allocation fails, or if a layout cannot be calculated for
98/// the allocation.
99pub fn try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()> {
100 if length == 0 {
101 Ok(Vec::new())
102 } else {
103 let boxed_slice = try_zeroed_slice_box(length)?;
104 Ok(boxed_slice.into_vec())
105 }
106}
107
108/// As [`try_zeroed_vec`] but unwraps for you
109pub fn zeroed_vec<T: Zeroable>(length: usize) -> Vec<T> {
110 try_zeroed_vec(length).unwrap()
111}
112
/// Allocates a `Box<[T]>` with all contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails, or if a layout cannot be calculated for
/// the allocation.
#[inline]
pub fn try_zeroed_slice_box<T: Zeroable>(
  length: usize,
) -> Result<Box<[T]>, ()> {
  if size_of::<T>() == 0 || length == 0 {
    // This will not allocate but simply create an arbitrary non-null aligned
    // slice pointer, valid for Box for a zero-sized pointee.
    let ptr = core::ptr::NonNull::dangling().as_ptr();
    let slice_ptr = core::ptr::slice_from_raw_parts_mut(ptr, length);
    return Ok(unsafe { Box::from_raw(slice_ptr) });
  }
  let layout = core::alloc::Layout::array::<T>(length).map_err(|_| ())?;
  let ptr = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    // SAFETY: `ptr` is non-null, was allocated with exactly the array layout
    // used above, and is zero-filled, which is a valid `[T]` because
    // `T: Zeroable`.
    let slice =
      unsafe { core::slice::from_raw_parts_mut(ptr as *mut T, length) };
    Ok(unsafe { Box::<[T]>::from_raw(slice) })
  }
}
146
147/// As [`try_zeroed_slice_box`], but unwraps for you.
148pub fn zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]> {
149 try_zeroed_slice_box(length).unwrap()
150}
151
/// Allocates a `Arc<T>` with all contents being zeroed out.
#[cfg(all(feature = "alloc_uninit", target_has_atomic = "ptr"))]
pub fn zeroed_arc<T: Zeroable>() -> Arc<T> {
  let mut arc = Arc::new_uninit();
  crate::write_zeroes(Arc::get_mut(&mut arc).unwrap()); // unwrap never fails for a newly allocated Arc
  // SAFETY: `write_zeroes` filled the allocation with zeroes, and the
  // all-zeroes bit pattern is a valid `T` because `T: Zeroable`.
  unsafe { arc.assume_init() }
}
159
/// Allocates a `Arc<[T]>` with all contents being zeroed out.
#[cfg(all(feature = "alloc_uninit", target_has_atomic = "ptr"))]
pub fn zeroed_arc_slice<T: Zeroable>(length: usize) -> Arc<[T]> {
  let mut arc = Arc::new_uninit_slice(length);
  crate::fill_zeroes(Arc::get_mut(&mut arc).unwrap()); // unwrap never fails for a newly allocated Arc
  // SAFETY: `fill_zeroes` filled every element with zeroes, and the
  // all-zeroes bit pattern is a valid `T` because `T: Zeroable`.
  unsafe { arc.assume_init() }
}
167
/// Allocates a `Rc<T>` with all contents being zeroed out.
#[cfg(feature = "alloc_uninit")]
pub fn zeroed_rc<T: Zeroable>() -> Rc<T> {
  let mut rc = Rc::new_uninit();
  crate::write_zeroes(Rc::get_mut(&mut rc).unwrap()); // unwrap never fails for a newly allocated Rc
  // SAFETY: `write_zeroes` filled the allocation with zeroes, and the
  // all-zeroes bit pattern is a valid `T` because `T: Zeroable`.
  unsafe { rc.assume_init() }
}
175
/// Allocates a `Rc<[T]>` with all contents being zeroed out.
#[cfg(feature = "alloc_uninit")]
pub fn zeroed_rc_slice<T: Zeroable>(length: usize) -> Rc<[T]> {
  let mut rc = Rc::new_uninit_slice(length);
  crate::fill_zeroes(Rc::get_mut(&mut rc).unwrap()); // unwrap never fails for a newly allocated Rc
  // SAFETY: `fill_zeroes` filled every element with zeroes, and the
  // all-zeroes bit pattern is a valid `T` because `T: Zeroable`.
  unsafe { rc.assume_init() }
}
183
184/// As [`try_cast_slice_box`], but unwraps for you.
185#[inline]
186pub fn cast_slice_box<A: NoUninit, B: AnyBitPattern>(
187 input: Box<[A]>,
188) -> Box<[B]> {
189 try_cast_slice_box(input).map_err(|(e, _v)| e).unwrap()
190}
191
/// Attempts to cast the content type of a `Box<[T]>`.
///
/// On failure you get back an error along with the starting `Box<[T]>`.
///
/// ## Failure
///
/// * The start and end content type of the `Box<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Box<[T]>` must be the
///   exact same.
#[inline]
pub fn try_cast_slice_box<A: NoUninit, B: AnyBitPattern>(
  input: Box<[A]>,
) -> Result<Box<[B]>, (PodCastError, Box<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so compare the total byte count instead.
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Box
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let box_ptr: *mut A = Box::into_raw(input) as *mut A;
      // SAFETY: the buffer is exactly `length * size_of::<B>()` bytes and the
      // alignment was verified equal above.
      let ptr: *mut [B] =
        unsafe { core::slice::from_raw_parts_mut(box_ptr as *mut B, length) };
      Ok(unsafe { Box::<[B]>::from_raw(ptr) })
    }
  } else {
    // Same element size: the slice length is unchanged, so a plain fat-pointer
    // cast preserves both the length metadata and the allocation Layout.
    let box_ptr: *mut [A] = Box::into_raw(input);
    let ptr: *mut [B] = box_ptr as *mut [B];
    Ok(unsafe { Box::<[B]>::from_raw(ptr) })
  }
}
238
239/// As [`try_cast_vec`], but unwraps for you.
240#[inline]
241pub fn cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B> {
242 try_cast_vec(input).map_err(|(e, _v)| e).unwrap()
243}
244
/// Attempts to cast the content type of a [`Vec`].
///
/// On failure you get back an error along with the starting `Vec`.
///
/// ## Failure
///
/// * The start and end content type of the `Vec` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Vec` must be the exact
///   same.
/// * The start and end capacity in bytes of the `Vec` must be the exact same.
#[inline]
pub fn try_cast_vec<A: NoUninit, B: AnyBitPattern>(
  input: Vec<A>,
) -> Result<Vec<B>, (PodCastError, Vec<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so check that BOTH the initialized byte count and
    // the full allocated byte count re-divide exactly under `B`.
    let input_size = size_of_val::<[A]>(&*input);
    let input_capacity = input.capacity() * size_of::<A>();
    if (size_of::<B>() == 0 && input_capacity != 0)
      || (size_of::<B>() != 0
        && (input_size % size_of::<B>() != 0
          || input_capacity % size_of::<B>() != 0))
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      // Note that we have to pay special attention to make sure that both
      // length and capacity are valid under B, as we do not want to
      // change which bytes are considered part of the initialized slice
      // of the Vec
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length and
      // capacity and recreate the Vec
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.

      // Note(Lokathor): First we record the length and capacity, which don't
      // have any secret provenance metadata.
      let length: usize =
        if size_of::<B>() != 0 { input_size / size_of::<B>() } else { 0 };
      let capacity: usize =
        if size_of::<B>() != 0 { input_capacity / size_of::<B>() } else { 0 };
      // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
      // ManuallyDrop, because if we used `core::mem::forget` after taking the
      // pointer then that would invalidate our pointer. In nightly there's a
      // "into raw parts" method, which we can switch this too eventually.
      let mut manual_drop_vec = ManuallyDrop::new(input);
      let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
      let ptr: *mut B = vec_ptr as *mut B;
      // SAFETY: `ptr` owns the allocation (the old Vec will not be dropped),
      // and length/capacity were recomputed to cover the same bytes.
      Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
    }
  } else {
    // Note(Lokathor): First we record the length and capacity, which don't have
    // any secret provenance metadata.
    let length: usize = input.len();
    let capacity: usize = input.capacity();
    // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
    // ManuallyDrop, because if we used `core::mem::forget` after taking the
    // pointer then that would invalidate our pointer. In nightly there's a
    // "into raw parts" method, which we can switch this too eventually.
    let mut manual_drop_vec = ManuallyDrop::new(input);
    let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
    let ptr: *mut B = vec_ptr as *mut B;
    // SAFETY: same size and alignment, same length/capacity, sole ownership.
    Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
  }
}
317
/// This "collects" a slice of pod data into a vec of a different pod type.
///
/// Unlike with [`cast_slice`] and [`cast_slice_mut`], this will always work.
///
/// The output vec will be of a minimal size/capacity to hold the slice given.
///
/// ```rust
/// # use bytemuck::*;
/// let halfwords: [u16; 4] = [5, 6, 7, 8];
/// let vec_of_words: Vec<u32> = pod_collect_to_vec(&halfwords);
/// if cfg!(target_endian = "little") {
///   assert_eq!(&vec_of_words[..], &[0x0006_0005, 0x0008_0007][..])
/// } else {
///   assert_eq!(&vec_of_words[..], &[0x0005_0006, 0x0007_0008][..])
/// }
/// ```
///
/// ## Panics
///
/// This divides by `size_of::<B>()`, so it panics if `B` is a zero-sized
/// type.
pub fn pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>(
  src: &[A],
) -> Vec<B> {
  let src_size = core::mem::size_of_val(src);
  // Note(Lokathor): dst_count is rounded up so that the dest will always be at
  // least as many bytes as the src.
  let dst_count = src_size / size_of::<B>()
    + if src_size % size_of::<B>() != 0 { 1 } else { 0 };
  let mut dst = vec![B::zeroed(); dst_count];

  // View both buffers as raw bytes and copy; any rounded-up tail bytes in
  // `dst` simply stay zeroed.
  let src_bytes: &[u8] = cast_slice(src);
  let dst_bytes: &mut [u8] = cast_slice_mut(&mut dst[..]);
  dst_bytes[..src_size].copy_from_slice(src_bytes);
  dst
}
349
350/// As [`try_cast_rc`], but unwraps for you.
351#[inline]
352pub fn cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
353 input: Rc<A>,
354) -> Rc<B> {
355 try_cast_rc(input).map_err(|(e, _v)| e).unwrap()
356}
357
358/// Attempts to cast the content type of a [`Rc`].
359///
360/// On failure you get back an error along with the starting `Rc`.
361///
362/// The bounds on this function are the same as [`cast_mut`], because a user
363/// could call `Rc::get_unchecked_mut` on the output, which could be observable
364/// in the input.
365///
366/// ## Failure
367///
368/// * The start and end content type of the `Rc` must have the exact same
369/// alignment.
370/// * The start and end size of the `Rc` must have the exact same size.
371#[inline]
372pub fn try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
373 input: Rc<A>,
374) -> Result<Rc<B>, (PodCastError, Rc<A>)> {
375 if align_of::<A>() != align_of::<B>() {
376 Err((PodCastError::AlignmentMismatch, input))
377 } else if size_of::<A>() != size_of::<B>() {
378 Err((PodCastError::SizeMismatch, input))
379 } else {
380 // Safety: Rc::from_raw requires size and alignment match, which is met.
381 let ptr: *const B = Rc::into_raw(input) as *const B;
382 Ok(unsafe { Rc::from_raw(ptr) })
383 }
384}
385
386/// As [`try_cast_arc`], but unwraps for you.
387#[inline]
388#[cfg(target_has_atomic = "ptr")]
389pub fn cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
390 input: Arc<A>,
391) -> Arc<B> {
392 try_cast_arc(input).map_err(|(e, _v)| e).unwrap()
393}
394
395/// Attempts to cast the content type of a [`Arc`].
396///
397/// On failure you get back an error along with the starting `Arc`.
398///
399/// The bounds on this function are the same as [`cast_mut`], because a user
400/// could call `Rc::get_unchecked_mut` on the output, which could be observable
401/// in the input.
402///
403/// ## Failure
404///
405/// * The start and end content type of the `Arc` must have the exact same
406/// alignment.
407/// * The start and end size of the `Arc` must have the exact same size.
408#[inline]
409#[cfg(target_has_atomic = "ptr")]
410pub fn try_cast_arc<
411 A: NoUninit + AnyBitPattern,
412 B: NoUninit + AnyBitPattern,
413>(
414 input: Arc<A>,
415) -> Result<Arc<B>, (PodCastError, Arc<A>)> {
416 if align_of::<A>() != align_of::<B>() {
417 Err((PodCastError::AlignmentMismatch, input))
418 } else if size_of::<A>() != size_of::<B>() {
419 Err((PodCastError::SizeMismatch, input))
420 } else {
421 // Safety: Arc::from_raw requires size and alignment match, which is met.
422 let ptr: *const B = Arc::into_raw(input) as *const B;
423 Ok(unsafe { Arc::from_raw(ptr) })
424 }
425}
426
427/// As [`try_cast_slice_rc`], but unwraps for you.
428#[inline]
429pub fn cast_slice_rc<
430 A: NoUninit + AnyBitPattern,
431 B: NoUninit + AnyBitPattern,
432>(
433 input: Rc<[A]>,
434) -> Rc<[B]> {
435 try_cast_slice_rc(input).map_err(|(e, _v)| e).unwrap()
436}
437
/// Attempts to cast the content type of a `Rc<[T]>`.
///
/// On failure you get back an error along with the starting `Rc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_unchecked_mut` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Rc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Rc<[T]>` must be the
///   exact same.
#[inline]
pub fn try_cast_slice_rc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Rc<[A]>,
) -> Result<Rc<[B]>, (PodCastError, Rc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so compare the total byte count instead.
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Rc
      // NOTE: This is a valid operation because according to the docs of
      // std::rc::Rc::from_raw(), the type U that was in the original Rc<U>
      // acquired from Rc::into_raw() must have the same size alignment and
      // size of the type T in the new Rc<T>. So as long as both the size
      // and alignment stay the same, the Rc will remain a valid Rc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let rc_ptr: *const A = Rc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(rc_ptr as *const B, length);
      Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
    }
  } else {
    // Same element size: the slice length is unchanged, so a plain fat-pointer
    // cast is sufficient.
    let rc_ptr: *const [A] = Rc::into_raw(input);
    let ptr: *const [B] = rc_ptr as *const [B];
    Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
  }
}
492
493/// As [`try_cast_slice_arc`], but unwraps for you.
494#[inline]
495#[cfg(target_has_atomic = "ptr")]
496pub fn cast_slice_arc<
497 A: NoUninit + AnyBitPattern,
498 B: NoUninit + AnyBitPattern,
499>(
500 input: Arc<[A]>,
501) -> Arc<[B]> {
502 try_cast_slice_arc(input).map_err(|(e, _v)| e).unwrap()
503}
504
/// Attempts to cast the content type of a `Arc<[T]>`.
///
/// On failure you get back an error along with the starting `Arc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_unchecked_mut` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Arc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Arc<[T]>` must be the
///   exact same.
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn try_cast_slice_arc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Arc<[A]>,
) -> Result<Arc<[B]>, (PodCastError, Arc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    // Element sizes differ, so compare the total byte count instead.
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Arc
      // NOTE: This is a valid operation because according to the docs of
      // std::sync::Arc::from_raw(), the type U that was in the original Arc<U>
      // acquired from Arc::into_raw() must have the same size alignment and
      // size of the type T in the new Arc<T>. So as long as both the size
      // and alignment stay the same, the Arc will remain a valid Arc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let arc_ptr: *const A = Arc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(arc_ptr as *const B, length);
      Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
    }
  } else {
    // Same element size: the slice length is unchanged, so a plain fat-pointer
    // cast is sufficient.
    let arc_ptr: *const [A] = Arc::into_raw(input);
    let ptr: *const [B] = arc_ptr as *const [B];
    Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
  }
}
560
/// An extension trait for `TransparentWrapper` and alloc types.
///
/// All methods are defaulted; the soundness of each conversion rests on the
/// unsafe contract of [`TransparentWrapper`] (identical representation of
/// `Self` and `Inner`).
pub trait TransparentWrapperAlloc<Inner: ?Sized>:
  TransparentWrapper<Inner>
{
  /// Convert a vec of the inner type into a vec of the wrapper type.
  fn wrap_vec(s: Vec<Inner>) -> Vec<Self>
  where
    Self: Sized,
    Inner: Sized,
  {
    // ManuallyDrop stops the original Vec's destructor from running, since
    // its buffer is about to be re-owned by the returned Vec.
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Self, length, capacity)
    }
  }

  /// Convert a box to the inner type into a box to the wrapper
  /// type.
  #[inline]
  fn wrap_box(s: Box<Inner>) -> Box<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let inner_ptr: *mut Inner = Box::into_raw(s);
      let wrapper_ptr: *mut Self = transmute!(inner_ptr);
      Box::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Rc`] to the inner type into an `Rc` to the wrapper type.
  #[inline]
  fn wrap_rc(s: Rc<Inner>) -> Rc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let inner_ptr: *const Inner = Rc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Rc::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Arc`] to the inner type into an `Arc` to the wrapper type.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn wrap_arc(s: Arc<Inner>) -> Arc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let inner_ptr: *const Inner = Arc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Arc::from_raw(wrapper_ptr)
    }
  }

  /// Convert a vec of the wrapper type into a vec of the inner type.
  fn peel_vec(s: Vec<Self>) -> Vec<Inner>
  where
    Self: Sized,
    Inner: Sized,
  {
    // ManuallyDrop stops the original Vec's destructor from running, since
    // its buffer is about to be re-owned by the returned Vec.
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Inner, length, capacity)
    }
  }

  /// Convert a box to the wrapper type into a box to the inner
  /// type.
  #[inline]
  fn peel_box(s: Box<Self>) -> Box<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let wrapper_ptr: *mut Self = Box::into_raw(s);
      let inner_ptr: *mut Inner = transmute!(wrapper_ptr);
      Box::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Rc`] to the wrapper type into an `Rc` to the inner type.
  #[inline]
  fn peel_rc(s: Rc<Self>) -> Rc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let wrapper_ptr: *const Self = Rc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Rc::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Arc`] to the wrapper type into an `Arc` to the inner type.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn peel_arc(s: Arc<Self>) -> Arc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let wrapper_ptr: *const Self = Arc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Arc::from_raw(inner_ptr)
    }
  }
}
771
// Blanket impl: every `TransparentWrapper<I>` automatically gains the alloc
// conversion helpers. The trait provides all method bodies, so this is empty.
impl<I: ?Sized, T: ?Sized + TransparentWrapper<I>> TransparentWrapperAlloc<I>
  for T
{
}
776
/// As `Box<[u8]>`, but remembers the original alignment.
///
/// Remembering the full [`Layout`] matters because deallocation must use the
/// same layout as allocation (see the `Drop` impl), which a plain
/// `Box<[u8]>` (alignment 1) could not guarantee.
pub struct BoxBytes {
  // SAFETY: `ptr` is aligned to `layout.align()`, points to
  // `layout.size()` initialized bytes, and, if `layout.size() > 0`,
  // is owned and was allocated with the global allocator with `layout`.
  ptr: NonNull<u8>,
  layout: Layout,
}
785
impl Deref for BoxBytes {
  type Target = [u8];

  // Borrow the whole allocation as an initialized byte slice.
  fn deref(&self) -> &Self::Target {
    // SAFETY: See type invariant.
    unsafe {
      core::slice::from_raw_parts(self.ptr.as_ptr(), self.layout.size())
    }
  }
}
796
impl DerefMut for BoxBytes {
  // Mutably borrow the whole allocation as an initialized byte slice.
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: See type invariant.
    unsafe {
      core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.layout.size())
    }
  }
}
805
impl Drop for BoxBytes {
  fn drop(&mut self) {
    // A zero-sized `BoxBytes` was never actually allocated (the pointer may
    // be dangling per the type invariant), so nothing must be freed.
    if self.layout.size() != 0 {
      // SAFETY: See type invariant: if `self.layout.size() != 0`, then
      // `self.ptr` is owned and was allocated with `self.layout`.
      unsafe { alloc::alloc::dealloc(self.ptr.as_ptr(), self.layout) };
    }
  }
}
815
// Convenience conversion: any boxed type that can expose its bytes (sealed
// trait) can become a `BoxBytes` via `.into()`.
impl<T: ?Sized + sealed::BoxBytesOf> From<Box<T>> for BoxBytes {
  fn from(value: Box<T>) -> Self {
    value.box_bytes_of()
  }
}
821
// Private module implementing the sealed-trait pattern: downstream crates can
// name these traits through bounds but cannot implement them.
mod sealed {
  use crate::{BoxBytes, PodCastError};
  use alloc::boxed::Box;

  /// Types whose boxed form can be converted into an untyped [`BoxBytes`].
  pub trait BoxBytesOf {
    fn box_bytes_of(self: Box<Self>) -> BoxBytes;
  }

  /// Types whose boxed form can be recovered from an untyped [`BoxBytes`].
  pub trait FromBoxBytes {
    fn try_from_box_bytes(
      bytes: BoxBytes,
    ) -> Result<Box<Self>, (PodCastError, BoxBytes)>;
  }
}
836
impl<T: NoUninit> sealed::BoxBytesOf for T {
  fn box_bytes_of(self: Box<Self>) -> BoxBytes {
    // `Layout::new::<T>()` is the layout the Box was allocated with, which is
    // exactly what `BoxBytes` must remember for later deallocation.
    let layout = Layout::new::<T>();
    let ptr = Box::into_raw(self) as *mut u8;
    // SAFETY: Box::into_raw() returns a non-null pointer.
    let ptr = unsafe { NonNull::new_unchecked(ptr) };
    BoxBytes { ptr, layout }
  }
}
846
847impl<T: NoUninit> sealed::BoxBytesOf for [T] {
848 fn box_bytes_of(self: Box<Self>) -> BoxBytes {
849 let layout = Layout::for_value::<[T]>(&self);
850 let ptr = Box::into_raw(self) as *mut u8;
851 // SAFETY: Box::into_raw() returns a non-null pointer.
852 let ptr = unsafe { NonNull::new_unchecked(ptr) };
853 BoxBytes { ptr, layout }
854 }
855}
856
impl sealed::BoxBytesOf for str {
  fn box_bytes_of(self: Box<Self>) -> BoxBytes {
    // Defer to the `[u8]` impl: a `Box<str>` reuses the same allocation as
    // its UTF-8 bytes, so this is a free conversion.
    self.into_boxed_bytes().box_bytes_of()
  }
}
862
863impl<T: AnyBitPattern> sealed::FromBoxBytes for T {
864 fn try_from_box_bytes(
865 bytes: BoxBytes,
866 ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
867 let layout = Layout::new::<T>();
868 if bytes.layout.align() != layout.align() {
869 Err((PodCastError::AlignmentMismatch, bytes))
870 } else if bytes.layout.size() != layout.size() {
871 Err((PodCastError::SizeMismatch, bytes))
872 } else {
873 let (ptr, _) = bytes.into_raw_parts();
874 // SAFETY: See BoxBytes type invariant.
875 Ok(unsafe { Box::from_raw(ptr.as_ptr() as *mut T) })
876 }
877 }
878}
879
880impl<T: AnyBitPattern> sealed::FromBoxBytes for [T] {
881 fn try_from_box_bytes(
882 bytes: BoxBytes,
883 ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
884 let single_layout = Layout::new::<T>();
885 if bytes.layout.align() != single_layout.align() {
886 Err((PodCastError::AlignmentMismatch, bytes))
887 } else if (single_layout.size() == 0 && bytes.layout.size() != 0)
888 || (single_layout.size() != 0
889 && bytes.layout.size() % single_layout.size() != 0)
890 {
891 Err((PodCastError::OutputSliceWouldHaveSlop, bytes))
892 } else {
893 let (ptr, layout) = bytes.into_raw_parts();
894 let length = if single_layout.size() != 0 {
895 layout.size() / single_layout.size()
896 } else {
897 0
898 };
899 let ptr =
900 core::ptr::slice_from_raw_parts_mut(ptr.as_ptr() as *mut T, length);
901 // SAFETY: See BoxBytes type invariant.
902 Ok(unsafe { Box::from_raw(ptr) })
903 }
904 }
905}
906
/// Re-interprets `Box<T>` as `BoxBytes`.
///
/// `T` must be either [`Sized`] and [`NoUninit`],
/// [`[U]`](slice) where `U: NoUninit`, or [`str`].
///
/// The returned [`BoxBytes`] remembers the allocation's layout, so it can
/// later be converted back with [`try_from_box_bytes`].
#[inline]
pub fn box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes {
  input.box_bytes_of()
}
915
916/// Re-interprets `BoxBytes` as `Box<T>`.
917///
918/// `T` must be either [`Sized`] + [`AnyBitPattern`], or
919/// [`[U]`](slice) where `U: AnyBitPattern`.
920///
921/// ## Panics
922///
923/// This is [`try_from_box_bytes`] but will panic on error and the input will be
924/// dropped.
925#[inline]
926#[cfg_attr(feature = "track_caller", track_caller)]
927pub fn from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
928 input: BoxBytes,
929) -> Box<T> {
930 try_from_box_bytes(input).map_err(|(error, _)| error).unwrap()
931}
932
/// Re-interprets `BoxBytes` as `Box<T>`.
///
/// `T` must be either [`Sized`] + [`AnyBitPattern`], or
/// [`[U]`](slice) where `U: AnyBitPattern`.
///
/// Returns `Err`:
/// * If the input isn't aligned for `T`.
/// * If `T: Sized` and the input's length isn't exactly the size of `T`.
/// * If `T = [U]` and the input's length isn't exactly a multiple of the size
///   of `U`.
///
/// On failure, the error tuple carries the untouched input back to the caller.
#[inline]
pub fn try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
  input: BoxBytes,
) -> Result<Box<T>, (PodCastError, BoxBytes)> {
  T::try_from_box_bytes(input)
}
949
950impl BoxBytes {
951 /// Constructs a `BoxBytes` from its raw parts.
952 ///
953 /// # Safety
954 ///
955 /// The pointer is owned, has been allocated with the provided layout, and
956 /// points to `layout.size()` initialized bytes.
957 pub unsafe fn from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self {
958 BoxBytes { ptr, layout }
959 }
960
961 /// Deconstructs a `BoxBytes` into its raw parts.
962 ///
963 /// The pointer is owned, has been allocated with the provided layout, and
964 /// points to `layout.size()` initialized bytes.
965 pub fn into_raw_parts(self) -> (NonNull<u8>, Layout) {
966 let me = ManuallyDrop::new(self);
967 (me.ptr, me.layout)
968 }
969
970 /// Returns the original layout.
971 pub fn layout(&self) -> Layout {
972 self.layout
973 }
974}