#![doc = include_str!("../../doc/ptr/span.md")]

use core::{
	any,
	fmt::{
		self,
		Binary,
		Debug,
		Display,
		Formatter,
		Pointer,
	},
	marker::PhantomData,
	mem,
	ptr::{
		self,
		NonNull,
	},
};

use tap::Pipe;
use wyz::{
	comu::{
		Address,
		Const,
		Mut,
		Mutability,
		NullPtrError,
		Reference,
		Referential,
	},
	fmt::FmtForward,
};

use super::{
	BitPtr,
	BitPtrError,
	BitPtrRange,
	MisalignError,
};
use crate::{
	index::{
		BitEnd,
		BitIdx,
	},
	mem::{
		bits_of,
		BitRegister,
	},
	order::{
		BitOrder,
		Lsb0,
	},
	slice::BitSlice,
	store::BitStore,
};

#[doc = include_str!("../../doc/ptr/BitSpan.md")]
pub(crate) struct BitSpan<M = Const, T = usize, O = Lsb0>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The base address of the spanned region, with the upper bits of the
	/// head index packed into its low (alignment) bits. Always non-null.
	ptr: NonNull<()>,
	/// The bit count of the region, shifted up by three, with the low three
	/// bits of the head index packed into its low bits.
	len: usize,
	/// The ordering of bits within each memory element.
	_or: PhantomData<O>,
	/// Marks the span as logically governing an `Address<M, [T]>` region.
	_ty: PhantomData<Address<M, [T]>>,
}

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// The canonical empty span: a well-aligned dangling pointer and a
	/// length of zero.
	pub(crate) const EMPTY: Self = Self {
		ptr: NonNull::<T>::dangling().cast::<()>(),
		len: 0,
		_or: PhantomData,
		_ty: PhantomData,
	};
	/// The number of low bits in `self.len` that hold the low bits of the
	/// head `BitIdx`.
	pub(crate) const LEN_HEAD_BITS: usize = 3;
	/// Selects the head-index bits stored in `self.len`.
	pub(crate) const LEN_HEAD_MASK: usize = 0b111;
	/// Selects the memory-address bits of `self.ptr`.
	pub(crate) const PTR_ADDR_MASK: usize = !0 << Self::PTR_HEAD_BITS;
	/// The number of low (alignment) bits in `self.ptr` that hold the upper
	/// bits of the head `BitIdx`.
	pub(crate) const PTR_HEAD_BITS: usize =
		<T::Mem as BitRegister>::INDX as usize - Self::LEN_HEAD_BITS;
	/// Selects the head-index bits stored in `self.ptr`.
	pub(crate) const PTR_HEAD_MASK: usize = !Self::PTR_ADDR_MASK;
	/// The maximum number of bits a span can encode.
	pub(crate) const REGION_MAX_BITS: usize = !0 >> Self::LEN_HEAD_BITS;
	/// The maximum number of memory elements a span can cover. This is one
	/// more than the element count of `REGION_MAX_BITS`, because a region
	/// that does not begin at the zeroth bit of its first element may spill
	/// into one additional element.
	pub(crate) const REGION_MAX_ELTS: usize =
		crate::mem::elts::<T::Mem>(Self::REGION_MAX_BITS) + 1;
}
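/* Illustration (added here for orientation; the concrete numbers are an
 * assumption for a 64-bit target with `T = usize`, not text from the
 * original module): `INDX` is 6, so `PTR_HEAD_BITS` is 3. A head index of
 * `0b101_110` (46) splits into `0b101`, stored in the three alignment bits
 * of `ptr`, and `0b110`, stored in the three low bits of `len`. The
 * remaining 61 bits of `len` hold the bit count, capping a single region at
 * `usize::MAX >> 3` bits.
 */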

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Constructs an empty span rooted at the given address, preserving the
	/// address while encoding zero live bits.
	#[cfg(feature = "alloc")]
	pub(crate) fn uninhabited(addr: Address<M, T>) -> Self {
		Self {
			ptr: addr.into_inner().cast::<()>(),
			..Self::EMPTY
		}
	}

	/// Creates a new encoded span from its components.
	///
	/// This fails if `addr` is null or misaligned for `T`, if `bits` exceeds
	/// `Self::REGION_MAX_BITS`, or if the described region would wrap past
	/// the end of the address space.
	pub(crate) fn new(
		addr: Address<M, T>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Result<Self, BitSpanError<T>> {
		if bits > Self::REGION_MAX_BITS {
			return Err(BitSpanError::TooLong(bits));
		}
		//  `BitPtr::new` rejects null or misaligned addresses.
		let base = BitPtr::<M, T, O>::new(addr, head)?;
		//  If advancing by `bits` wraps the address space, the region cannot
		//  be represented.
		let last = base.wrapping_add(bits);
		if last < base {
			return Err(BitSpanError::TooHigh(addr.to_const()));
		}

		Ok(unsafe { Self::new_unchecked(addr, head, bits) })
	}

	/// Creates a new encoded span from its components, without validation.
	///
	/// ## Safety
	///
	/// The arguments must uphold the checks performed in `Self::new`: `addr`
	/// is non-null and well-aligned for `T`, `bits` does not exceed
	/// `Self::REGION_MAX_BITS`, and the region does not wrap the address
	/// space.
	pub(crate) unsafe fn new_unchecked(
		addr: Address<M, T>,
		head: BitIdx<T::Mem>,
		bits: usize,
	) -> Self {
		let addr = addr.to_const().cast::<u8>();

		//  Split the head index: its upper bits travel in the pointer's
		//  alignment bits, its lowest three bits travel in `len`.
		let head = head.into_inner() as usize;
		let ptr_data = addr as usize & Self::PTR_ADDR_MASK;
		let ptr_head = head >> Self::LEN_HEAD_BITS;

		let len_head = head & Self::LEN_HEAD_MASK;
		let len_bits = bits << Self::LEN_HEAD_BITS;

		/* Rebuild the pointer by offsetting from the original address rather
		 * than casting the packed integer back into a pointer, so that the
		 * provenance of `addr` is carried into the stored pointer.
		 */
		let ptr_raw = ptr_data | ptr_head;
		let ptr = addr.wrapping_add(ptr_raw.wrapping_sub(addr as usize));

		Self {
			ptr: NonNull::new_unchecked(ptr.cast::<()>() as *mut ()),
			len: len_bits | len_head,
			..Self::EMPTY
		}
	}
}

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Gets the base address of the spanned region, discarding the
	/// head-index bits stored in the pointer.
	pub(crate) fn address(&self) -> Address<M, T> {
		Address::new(unsafe {
			NonNull::new_unchecked(
				(self.ptr.as_ptr() as usize & Self::PTR_ADDR_MASK) as *mut T,
			)
		})
	}

	/// Overwrites the base address, preserving the head-index bits stored in
	/// the pointer.
	///
	/// ## Safety
	///
	/// The new address must be non-null and well-aligned for `T`, and must
	/// describe a region at least as large as the one currently encoded.
	#[cfg(feature = "alloc")]
	pub(crate) unsafe fn set_address(&mut self, addr: Address<M, T>) {
		let mut addr_value = addr.to_const() as usize;
		addr_value &= Self::PTR_ADDR_MASK;
		addr_value |= self.ptr.as_ptr() as usize & Self::PTR_HEAD_MASK;
		self.ptr = NonNull::new_unchecked(addr_value as *mut ())
	}

	/// Reassembles the starting bit index from its two storage locations.
	pub(crate) fn head(&self) -> BitIdx<T::Mem> {
		//  Recover the high bits of the head index from the pointer,
		let ptr = self.ptr.as_ptr() as usize;
		let ptr_head = (ptr & Self::PTR_HEAD_MASK) << Self::LEN_HEAD_BITS;
		//  and its low bits from the length counter, then recombine.
		let len_head = self.len & Self::LEN_HEAD_MASK;
		unsafe { BitIdx::new_unchecked((ptr_head | len_head) as u8) }
	}

	/// Overwrites the head index, leaving the address and bit count intact.
	///
	/// ## Safety
	///
	/// This only rewrites the encoding; the caller must ensure that the
	/// resulting span still describes memory the handle is allowed to view.
	#[cfg(feature = "alloc")]
	pub(crate) unsafe fn set_head(&mut self, head: BitIdx<T::Mem>) {
		let head = head.into_inner() as usize;
		let mut ptr = self.ptr.as_ptr() as usize;

		ptr &= Self::PTR_ADDR_MASK;
		ptr |= head >> Self::LEN_HEAD_BITS;
		self.ptr = NonNull::new_unchecked(ptr as *mut ());

		self.len &= !Self::LEN_HEAD_MASK;
		self.len |= head & Self::LEN_HEAD_MASK;
	}

	/// Gets the number of live bits in the span.
	pub(crate) fn len(&self) -> usize {
		self.len >> Self::LEN_HEAD_BITS
	}

	/// Overwrites the bit count, leaving the address and head index intact.
	///
	/// ## Safety
	///
	/// `new_len` must not exceed `Self::REGION_MAX_BITS`, and the resulting
	/// span must still lie within memory the handle is allowed to view.
	/// Debug builds re-run the checks in `Self::new`.
	pub(crate) unsafe fn set_len(&mut self, new_len: usize) {
		if cfg!(debug_assertions) {
			*self = Self::new(self.address(), self.head(), new_len).unwrap();
		}
		else {
			self.len &= Self::LEN_HEAD_MASK;
			self.len |= new_len << Self::LEN_HEAD_BITS;
		}
	}

	/// Decomposes the span into its base address, head index, and bit count.
	pub(crate) fn raw_parts(&self) -> (Address<M, T>, BitIdx<T::Mem>, usize) {
		(self.address(), self.head(), self.len())
	}
}

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Computes the number of `T` elements the span touches, from its head
	/// index and bit count.
	pub(crate) fn elements(&self) -> usize {
		crate::mem::elts::<T>(self.len() + self.head().into_inner() as usize)
	}

	/// Computes the one-past-the-end index of the live region within its
	/// final touched element.
	pub(crate) fn tail(&self) -> BitEnd<T::Mem> {
		let (head, len) = (self.head(), self.len());
		let (_, tail) = head.span(len);
		tail
	}
}

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Reinterprets the span over a different storage type.
	///
	/// The encoded pointer and length fields are carried over verbatim; the
	/// head index and bit count are *not* rescaled for the new element
	/// width.
	pub(crate) fn cast<U>(self) -> BitSpan<M, U, O>
	where U: BitStore {
		let Self { ptr, len, .. } = self;
		BitSpan {
			ptr,
			len,
			..BitSpan::EMPTY
		}
	}

	/// Splits the span into a maximal central region aligned to `U`, plus
	/// misaligned `T` spans before and after it. This mirrors
	/// `<[T]>::align_to::<U>`, but works on the encoded span without
	/// creating memory references.
	///
	/// ## Safety
	///
	/// The same requirements as `slice::align_to` apply: the caller must
	/// treat the returned middle span as a transmutation of the original
	/// memory.
	pub(crate) unsafe fn align_to<U>(self) -> (Self, BitSpan<M, U, O>, Self)
	where U: BitStore {
		let this = self.to_bitptr();
		//  Number of bits not yet assigned to one of the three output spans.
		let mut rem = self.len();
		//  Number of bits between the head and the first `U`-aligned address.
		let align = mem::align_of::<U>();
		let step = this.align_offset(align);
		//  If the aligned address lies beyond the span, everything is `left`.
		if step > rem {
			return (self, BitSpan::EMPTY, Self::EMPTY);
		}
		let left = this.span_unchecked(step);
		rem -= step;

		//  The aligned middle region begins at bit zero of a `U` element.
		let mid_base =
			this.add(step).address().cast::<U>().pipe(|addr| {
				BitPtr::<M, U, O>::new_unchecked(addr, BitIdx::MIN)
			});
		//  Whole `U` elements in the middle, and the bits left over after
		//  them.
		let mid_elts = rem >> <U::Mem as BitRegister>::INDX;
		let excess = rem & <U::Mem as BitRegister>::MASK as usize;
		let step = rem - excess;
		let mid = mid_base.span_unchecked(step);

		//  The remainder resumes as `T` immediately after the aligned middle.
		let right_base =
			mid_base.address().add(mid_elts).cast::<T>().pipe(|addr| {
				BitPtr::<M, T, O>::new_unchecked(addr, BitIdx::MIN)
			});
		let right = right_base.span_unchecked(excess);

		(left, mid, right)
	}

	/// Recovers a span from a raw mutable bit-slice pointer that was
	/// previously produced by this module.
	pub(crate) fn from_bitslice_ptr_mut(raw: *mut BitSlice<T, O>) -> Self {
		let BitSpan { ptr, len, .. } =
			BitSpan::from_bitslice_ptr(raw as *const BitSlice<T, O>);
		Self {
			ptr,
			len,
			..Self::EMPTY
		}
	}

	/// Encodes the span as a raw bit-slice pointer, storing the packed
	/// length counter in the slice pointer's metadata field.
	pub(crate) fn into_bitslice_ptr(self) -> *const BitSlice<T, O> {
		let Self { ptr, len, .. } = self;
		ptr::slice_from_raw_parts(ptr.as_ptr(), len) as *const BitSlice<T, O>
	}

	/// Converts the span into a shared bit-slice reference.
	///
	/// ## Safety
	///
	/// The span must describe initialized memory that is valid for reads for
	/// the chosen lifetime.
	pub(crate) unsafe fn into_bitslice_ref<'a>(self) -> &'a BitSlice<T, O> {
		&*self.into_bitslice_ptr()
	}

	/// Produces a bit-pointer to the first live bit of the span.
	pub(crate) fn to_bitptr(self) -> BitPtr<M, T, O> {
		unsafe { BitPtr::new_unchecked(self.address(), self.head()) }
	}

	/// Produces the half-open bit-pointer range covering the span.
	pub(crate) fn to_bitptr_range(self) -> BitPtrRange<M, T, O> {
		let start = self.to_bitptr();
		let end = unsafe { start.add(self.len()) };
		BitPtrRange { start, end }
	}

	/// Converts the span into the address of a bit-slice region.
	pub(crate) fn to_bitslice_addr(self) -> Address<M, BitSlice<T, O>> {
		(self.into_bitslice_ptr() as *mut BitSlice<T, O>)
			.pipe(|ptr| unsafe { NonNull::new_unchecked(ptr) })
			.pipe(Address::new)
	}

	/// Converts the span into a bit-slice reference whose mutability matches
	/// `M`.
	pub(crate) fn to_bitslice<'a>(self) -> Reference<'a, M, BitSlice<T, O>>
	where Address<M, BitSlice<T, O>>: Referential<'a> {
		unsafe { self.to_bitslice_addr().to_ref() }
	}
}

impl<T, O> BitSpan<Const, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Recovers a span from a raw bit-slice pointer, treating a null pointer
	/// as the empty span. The slice pointer's metadata field carries the
	/// packed length counter.
	pub(crate) fn from_bitslice_ptr(raw: *const BitSlice<T, O>) -> Self {
		let slice_nn = match NonNull::new(raw as *const [()] as *mut [()]) {
			Some(nn) => nn,
			None => return Self::EMPTY,
		};
		let ptr = slice_nn.cast::<()>();
		let len = unsafe { slice_nn.as_ref() }.len();
		Self {
			ptr,
			len,
			..Self::EMPTY
		}
	}
}

impl<T, O> BitSpan<Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	/// Encodes the span as a raw mutable bit-slice pointer.
	pub(crate) fn into_bitslice_ptr_mut(self) -> *mut BitSlice<T, O> {
		self.into_bitslice_ptr() as *mut BitSlice<T, O>
	}

	/// Converts the span into an exclusive bit-slice reference.
	///
	/// ## Safety
	///
	/// The span must describe initialized memory that is valid for reads and
	/// writes for the chosen lifetime, and no other reference to it may be
	/// live.
	pub(crate) unsafe fn into_bitslice_mut<'a>(self) -> &'a mut BitSlice<T, O> {
		&mut *self.into_bitslice_ptr_mut()
	}
}

impl<M, T, O> BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	/// Tests whether a bit count can be encoded into a span descriptor.
	#[cfg(feature = "alloc")]
	pub(crate) fn len_encodable(len: usize) -> bool {
		len <= Self::REGION_MAX_BITS
	}

	/// Renders the span's components into a `Debug`-style struct body,
	/// prefixed with `Bit{name}` and the `T`/`O` type parameters, and
	/// followed by any extra caller-supplied fields.
	pub(crate) fn render<'a>(
		&'a self,
		fmt: &'a mut Formatter,
		name: &'a str,
		fields: impl IntoIterator<Item = &'a (&'a str, &'a dyn Debug)>,
	) -> fmt::Result {
		write!(
			fmt,
			"Bit{}<{}, {}>",
			name,
			any::type_name::<T::Mem>(),
			any::type_name::<O>(),
		)?;
		let mut builder = fmt.debug_struct("");
		builder
			.field("addr", &self.address().fmt_pointer())
			.field("head", &self.head().fmt_binary())
			.field("bits", &self.len());
		for (name, value) in fields {
			builder.field(name, value);
		}
		builder.finish()
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Clone for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn clone(&self) -> Self {
		*self
	}
}

impl<M1, M2, O, T1, T2> PartialEq<BitSpan<M2, T2, O>> for BitSpan<M1, T1, O>
where
	M1: Mutability,
	M2: Mutability,
	O: BitOrder,
	T1: BitStore,
	T2: BitStore,
{
	#[inline]
	fn eq(&self, other: &BitSpan<M2, T2, O>) -> bool {
		let (addr_a, head_a, bits_a) = self.raw_parts();
		let (addr_b, head_b, bits_b) = other.raw_parts();
		bits_of::<T1::Mem>() == bits_of::<T2::Mem>()
			&& addr_a.to_const() as usize == addr_b.to_const() as usize
			&& head_a.into_inner() == head_b.into_inner()
			&& bits_a == bits_b
	}
}

impl<T, O> From<&BitSlice<T, O>> for BitSpan<Const, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(bits: &BitSlice<T, O>) -> Self {
		Self::from_bitslice_ptr(bits)
	}
}

impl<T, O> From<&mut BitSlice<T, O>> for BitSpan<Mut, T, O>
where
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn from(bits: &mut BitSlice<T, O>) -> Self {
		Self::from_bitslice_ptr_mut(bits)
	}
}

#[cfg(not(tarpaulin_include))]
impl<M, T, O> Default for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn default() -> Self {
		Self::EMPTY
	}
}

impl<M, T, O> Debug for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		self.render(fmt, "Span", None)
	}
}

impl<M, T, O> Pointer for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		Pointer::fmt(&self.address(), fmt)?;
		fmt.write_str("(")?;
		Binary::fmt(&self.head(), fmt)?;
		fmt.write_str(")[")?;
		Display::fmt(&self.len(), fmt)?;
		fmt.write_str("]")
	}
}

impl<M, T, O> Copy for BitSpan<M, T, O>
where
	M: Mutability,
	T: BitStore,
	O: BitOrder,
{
}

/// An error produced when constructing a `BitSpan` from its components.
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum BitSpanError<T>
where T: BitStore
{
	/// The base address was the null pointer.
	Null(NullPtrError),
	/// The base address was not aligned for the element type `T`.
	Misaligned(MisalignError<T>),
	/// The requested bit count exceeds `REGION_MAX_BITS`.
	TooLong(usize),
	/// The requested span would wrap around the end of the address space.
	TooHigh(*const T),
}

#[cfg(not(tarpaulin_include))]
impl<T> From<BitPtrError<T>> for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn from(err: BitPtrError<T>) -> Self {
		match err {
			BitPtrError::Null(err) => Self::Null(err),
			BitPtrError::Misaligned(err) => Self::Misaligned(err),
		}
	}
}

#[cfg(not(tarpaulin_include))]
impl<T> From<MisalignError<T>> for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn from(err: MisalignError<T>) -> Self {
		Self::Misaligned(err)
	}
}

#[cfg(not(tarpaulin_include))]
impl<T> Debug for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		write!(fmt, "BitSpanError<{}>::", any::type_name::<T::Mem>())?;
		match self {
			Self::Null(err) => fmt.debug_tuple("Null").field(&err).finish(),
			Self::Misaligned(err) => {
				fmt.debug_tuple("Misaligned").field(&err).finish()
			},
			Self::TooLong(len) => fmt.debug_tuple("TooLong").field(len).finish(),
			Self::TooHigh(addr) => {
				fmt.debug_tuple("TooHigh").field(addr).finish()
			},
		}
	}
}

#[cfg(not(tarpaulin_include))]
impl<T> Display for BitSpanError<T>
where T: BitStore
{
	#[inline]
	fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
		match self {
			Self::Null(err) => Display::fmt(err, fmt),
			Self::Misaligned(err) => Display::fmt(err, fmt),
			Self::TooLong(len) => write!(
				fmt,
				"Length {} is too long to encode in a bit-slice, which can \
				 only accept {} bits",
				len,
				BitSpan::<Const, T, Lsb0>::REGION_MAX_BITS,
			),
			Self::TooHigh(addr) => write!(
				fmt,
				"Address {:p} is too high, and produces a span that wraps \
				 around to the zero address.",
				addr,
			),
		}
	}
}

//  The raw pointer carried by `TooHigh` is only ever used for display, so
//  the error type can safely be moved and shared across threads.
unsafe impl<T> Send for BitSpanError<T> where T: BitStore {}

unsafe impl<T> Sync for BitSpanError<T> where T: BitStore {}

#[cfg(feature = "std")]
impl<T> std::error::Error for BitSpanError<T> where T: BitStore {}
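
/* A minimal sketch of how the encoding round-trip could be exercised. This
 * test module is an illustrative addition, not part of the crate's own
 * suite; the names and assertions are assumptions built only on the
 * constructors and accessors defined above.
 */
#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn roundtrip_raw_parts() {
		let datum = 0usize;
		//  Build a span over a single aligned element with a non-zero head.
		let addr = Address::<Const, usize>::new(NonNull::from(&datum));
		let head = BitIdx::<usize>::new(5).unwrap();

		let span = BitSpan::<Const, usize, Lsb0>::new(addr, head, 17).unwrap();
		let (addr2, head2, bits) = span.raw_parts();

		//  Decoding must recover exactly the components that were encoded.
		assert_eq!(addr2.to_const(), addr.to_const());
		assert_eq!(head2.into_inner(), 5);
		assert_eq!(bits, 17);
	}

	#[test]
	fn oversized_regions_are_rejected() {
		let datum = 0usize;
		let addr = Address::<Const, usize>::new(NonNull::from(&datum));
		let head = BitIdx::<usize>::MIN;

		//  One bit past the encodable maximum must produce `TooLong`.
		let too_long = BitSpan::<Const, usize, Lsb0>::REGION_MAX_BITS + 1;
		assert!(BitSpan::<Const, usize, Lsb0>::new(addr, head, too_long).is_err());
	}
}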