bitvec/field.rs

#![doc = include_str!("../doc/field.md")]

use core::{
	mem,
	ptr,
};

use funty::Integral;
use tap::Pipe;
use wyz::comu::{
	Const,
	Mut,
};

use crate::{
	array::BitArray,
	devel as dvl,
	domain::{
		Domain,
		PartialElement,
	},
	mem::bits_of,
	order::{
		BitOrder,
		Lsb0,
		Msb0,
	},
	slice::BitSlice,
	store::BitStore,
	view::BitViewSized,
};
#[cfg(feature = "alloc")]
use crate::{
	boxed::BitBox,
	vec::BitVec,
};

mod io;
mod tests;

#[doc = include_str!("../doc/field/BitField.md")]
pub trait BitField {
	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[doc = include_str!("../doc/field/BitField_load.md")]
	fn load<I>(&self) -> I
	where I: Integral {
		if cfg!(target_endian = "little") {
			self.load_le::<I>()
		}
		else if cfg!(target_endian = "big") {
			self.load_be::<I>()
		}
		else {
			match option_env!("CARGO_PKG_REPOSITORY") {
				Some(env) => unreachable!(
					"This architecture is not supported! Please consider \
					 filing an issue at {}",
					env
				),
				None => unreachable!(
					"This architecture is not supported! Please consider \
					 filing an issue"
				),
			}
		}
	}

	#[inline]
	#[cfg(not(tarpaulin_include))]
	#[doc = include_str!("../doc/field/BitField_store.md")]
	fn store<I>(&mut self, value: I)
	where I: Integral {
		if cfg!(target_endian = "little") {
			self.store_le::<I>(value);
		}
		else if cfg!(target_endian = "big") {
			self.store_be::<I>(value);
		}
		else {
			match option_env!("CARGO_PKG_REPOSITORY") {
				Some(env) => unreachable!(
					"This architecture is not supported! Please consider \
					 filing an issue at {}",
					env
				),
				None => unreachable!(
					"This architecture is not supported! Please consider \
					 filing an issue"
				),
			}
		}
	}

	#[doc = include_str!("../doc/field/BitField_load_le.md")]
	fn load_le<I>(&self) -> I
	where I: Integral;

	#[doc = include_str!("../doc/field/BitField_load_be.md")]
	fn load_be<I>(&self) -> I
	where I: Integral;

	#[doc = include_str!("../doc/field/BitField_store_le.md")]
	fn store_le<I>(&mut self, value: I)
	where I: Integral;

	#[doc = include_str!("../doc/field/BitField_store_be.md")]
	fn store_be<I>(&mut self, value: I)
	where I: Integral;
}
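
//  A minimal usage sketch (not part of the crate; the slice range and values
//  here are illustrative). Any bit-slice whose ordering implements `BitField`
//  can round-trip an integer through a region no wider than that integer:
//
//      use bitvec::prelude::*;
//
//      let mut raw = [0u8; 2];
//      let bits = raw.view_bits_mut::<Lsb0>();
//      bits[4 .. 12].store_le::<u16>(0xA5);
//      assert_eq!(bits[4 .. 12].load_le::<u16>(), 0xA5);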

#[doc = include_str!("../doc/field/BitField_Lsb0.md")]
impl<T> BitField for BitSlice<T, Lsb0>
where T: BitStore
{
	#[inline]
	#[doc = include_str!("../doc/field/BitField_Lsb0_load_le.md")]
	fn load_le<I>(&self) -> I
	where I: Integral {
		let len = self.len();
		check::<I>("load", len);

		match self.domain() {
			//  In Lsb0, the head counts distance from LSedge to first live bit.
			Domain::Enclave(elem) => get(elem, elem.head().into_inner()),
			Domain::Region { head, body, tail } => {
				let mut accum = I::ZERO;

				if let Some(elem) = tail {
					accum = get(elem, 0);
				}

				for elem in body.iter().rev().map(BitStore::load_value) {
					maybe_shift_left(&mut accum, bits_of::<T>());
					accum |= resize::<T::Mem, I>(elem);
				}

				if let Some(elem) = head {
					let shamt = elem.head().into_inner();
					maybe_shift_left(
						&mut accum,
						bits_of::<T>() - shamt as usize,
					);
					accum |= get::<_, _, I>(elem, shamt);
				}

				accum
			},
		}
		.pipe(|elem| sign(elem, len))
	}

	#[inline]
	#[doc = include_str!("../doc/field/BitField_Lsb0_load_be.md")]
	fn load_be<I>(&self) -> I
	where I: Integral {
		let len = self.len();
		check::<I>("load", len);

		match self.domain() {
			Domain::Enclave(elem) => get(elem, elem.head().into_inner()),
			Domain::Region { head, body, tail } => {
				let mut accum = I::ZERO;

				if let Some(elem) = head {
					accum = get(elem, elem.head().into_inner());
				}

				for elem in body.iter().map(BitStore::load_value) {
					maybe_shift_left(&mut accum, bits_of::<T>());
					accum |= resize::<T::Mem, I>(elem);
				}

				if let Some(elem) = tail {
					let shamt = elem.tail().into_inner() as usize;
					maybe_shift_left(&mut accum, shamt);
					accum |= get::<_, _, I>(elem, 0);
				}

				accum
			},
		}
		.pipe(|elem| sign(elem, len))
	}

	#[inline]
	#[doc = include_str!("../doc/field/BitField_Lsb0_store_le.md")]
	fn store_le<I>(&mut self, mut value: I)
	where I: Integral {
		check::<I>("store", self.len());

		match self.domain_mut() {
			Domain::Enclave(elem) => {
				let shamt = elem.head().into_inner();
				set(elem, value, shamt);
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = head {
					let shamt = elem.head().into_inner();
					set(elem, value, shamt);
					let rshamt = bits_of::<T>() - shamt as usize;
					maybe_shift_right(&mut value, rshamt);
				}

				for elem in body.iter_mut() {
					elem.store_value(resize(value));
					maybe_shift_right(&mut value, bits_of::<T>());
				}

				if let Some(elem) = tail {
					set(elem, value, 0);
				}
			},
		}
	}

	#[inline]
	#[doc = include_str!("../doc/field/BitField_Lsb0_store_be.md")]
	fn store_be<I>(&mut self, mut value: I)
	where I: Integral {
		check::<I>("store", self.len());

		match self.domain_mut() {
			Domain::Enclave(elem) => {
				let shamt = elem.head().into_inner();
				set(elem, value, shamt);
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = tail {
					let shamt = elem.tail().into_inner() as usize;
					set(elem, value, 0);
					maybe_shift_right(&mut value, shamt);
				}

				for elem in body.iter_mut().rev() {
					elem.store_value(resize(value));
					maybe_shift_right(&mut value, bits_of::<T>());
				}

				if let Some(elem) = head {
					let shamt = elem.head().into_inner();
					set(elem, value, shamt);
				}
			},
		}
	}
}
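
//  Worked sketch of the element ordering (illustrative values, not from the
//  crate's test suite): in a `Lsb0` little-endian load, the partial head
//  element supplies the least significant chunk and the partial tail element
//  the most significant chunk.
//
//      use bitvec::prelude::*;
//
//      let raw = [0b1111_0000u8, 0b1010_1010, 0b0000_1111];
//      let bits = raw.view_bits::<Lsb0>();
//      //  Bits 4 .. 20 cover the high nibble of byte 0 (head), all of
//      //  byte 1 (body), and the low nibble of byte 2 (tail).
//      let val: u16 = bits[4 .. 20].load_le();
//      assert_eq!(val, 0b1111_1010_1010_1111);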

#[doc = include_str!("../doc/field/BitField_Msb0.md")]
impl<T> BitField for BitSlice<T, Msb0>
where T: BitStore
{
	#[inline]
	#[doc = include_str!("../doc/field/BitField_Msb0_load_le.md")]
	fn load_le<I>(&self) -> I
	where I: Integral {
		let len = self.len();
		check::<I>("load", len);

		match self.domain() {
			Domain::Enclave(elem) => {
				let shamt = bits_of::<T>() as u8 - elem.tail().into_inner();
				get(elem, shamt)
			},
			Domain::Region { head, body, tail } => {
				let mut accum = I::ZERO;

				if let Some(elem) = tail {
					let shamt = bits_of::<T>() as u8 - elem.tail().into_inner();
					accum = get(elem, shamt);
				}

				for elem in body.iter().rev().map(BitStore::load_value) {
					maybe_shift_left(&mut accum, bits_of::<T>());
					accum |= resize::<T::Mem, I>(elem);
				}

				if let Some(elem) = head {
					let shamt =
						bits_of::<T>() - elem.head().into_inner() as usize;
					maybe_shift_left(&mut accum, shamt);
					accum |= get::<_, _, I>(elem, 0);
				}

				accum
			},
		}
		.pipe(|elem| sign(elem, len))
	}

	#[inline]
	#[doc = include_str!("../doc/field/BitField_Msb0_load_be.md")]
	fn load_be<I>(&self) -> I
	where I: Integral {
		let len = self.len();
		check::<I>("load", len);

		match self.domain() {
			Domain::Enclave(elem) => {
				let shamt = bits_of::<T>() as u8 - elem.tail().into_inner();
				get(elem, shamt)
			},
			Domain::Region { head, body, tail } => {
				let mut accum = I::ZERO;

				if let Some(elem) = head {
					accum = get(elem, 0);
				}

				for elem in body.iter().map(BitStore::load_value) {
					maybe_shift_left(&mut accum, bits_of::<T>());
					accum |= resize::<T::Mem, I>(elem);
				}

				if let Some(elem) = tail {
					let shamt = elem.tail().into_inner();
					maybe_shift_left(&mut accum, shamt as usize);
					accum |= get::<_, _, I>(elem, bits_of::<T>() as u8 - shamt);
				}

				accum
			},
		}
		.pipe(|elem| sign(elem, len))
	}

	#[inline]
	#[doc = include_str!("../doc/field/BitField_Msb0_store_le.md")]
	fn store_le<I>(&mut self, mut value: I)
	where I: Integral {
		check::<I>("store", self.len());

		match self.domain_mut() {
			Domain::Enclave(elem) => {
				let shamt = bits_of::<T>() as u8 - elem.tail().into_inner();
				set(elem, value, shamt);
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = head {
					let shamt =
						bits_of::<T>() - elem.head().into_inner() as usize;
					set(elem, value, 0);
					maybe_shift_right(&mut value, shamt);
				}

				for elem in body.iter_mut() {
					elem.store_value(resize(value));
					maybe_shift_right(&mut value, bits_of::<T>());
				}

				if let Some(elem) = tail {
					let shamt = bits_of::<T>() as u8 - elem.tail().into_inner();
					set(elem, value, shamt);
				}
			},
		}
	}

	#[inline]
	#[doc = include_str!("../doc/field/BitField_Msb0_store_be.md")]
	fn store_be<I>(&mut self, mut value: I)
	where I: Integral {
		check::<I>("store", self.len());

		match self.domain_mut() {
			Domain::Enclave(elem) => {
				let shamt = bits_of::<T>() as u8 - elem.tail().into_inner();
				set(elem, value, shamt);
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = tail {
					let tail = elem.tail().into_inner() as usize;
					let shamt = bits_of::<T>() - tail;
					set(elem, value, shamt as u8);
					maybe_shift_right(&mut value, tail);
				}

				for elem in body.iter_mut().rev() {
					elem.store_value(resize(value));
					maybe_shift_right(&mut value, bits_of::<T>());
				}

				if let Some(elem) = head {
					set(elem, value, 0);
				}
			},
		}
	}
}
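
//  Worked sketch for comparison (illustrative values, not from the crate's
//  test suite): in a `Msb0` big-endian load, the partial head element
//  supplies the most significant chunk and the partial tail element the
//  least significant chunk.
//
//      use bitvec::prelude::*;
//
//      let raw = [0b0000_1111u8, 0b1010_1010, 0b1111_0000];
//      let bits = raw.view_bits::<Msb0>();
//      //  Bits 4 .. 20 cover the low nibble of byte 0 (head), all of
//      //  byte 1 (body), and the high nibble of byte 2 (tail).
//      let val: u16 = bits[4 .. 20].load_be();
//      assert_eq!(val, 0b1111_1010_1010_1111);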

#[doc = include_str!("../doc/field/impl_BitArray.md")]
impl<A, O> BitField for BitArray<A, O>
where
	O: BitOrder,
	A: BitViewSized,
	BitSlice<A::Store, O>: BitField,
{
	#[inline(always)]
	fn load_le<I>(&self) -> I
	where I: Integral {
		let mut accum = I::ZERO;

		for elem in self.as_raw_slice().iter().map(BitStore::load_value).rev() {
			maybe_shift_left(&mut accum, bits_of::<A::Store>());
			accum |= resize::<_, I>(elem);
		}

		sign(accum, self.len())
	}

	#[inline(always)]
	fn load_be<I>(&self) -> I
	where I: Integral {
		let mut accum = I::ZERO;

		for elem in self.as_raw_slice().iter().map(BitStore::load_value) {
			maybe_shift_left(&mut accum, bits_of::<A::Store>());
			accum |= resize::<_, I>(elem);
		}

		sign(accum, self.len())
	}

	#[inline(always)]
	fn store_le<I>(&mut self, mut value: I)
	where I: Integral {
		for slot in self.as_raw_mut_slice() {
			slot.store_value(resize(value));
			maybe_shift_right(&mut value, bits_of::<A::Store>());
		}
	}

	#[inline(always)]
	fn store_be<I>(&mut self, mut value: I)
	where I: Integral {
		for slot in self.as_raw_mut_slice().iter_mut().rev() {
			slot.store_value(resize(value));
			maybe_shift_right(&mut value, bits_of::<A::Store>());
		}
	}
}
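
//  Usage sketch (illustrative, not from the crate's tests): this
//  implementation always transfers through the *entire* backing array, so an
//  array whose width matches the integer round-trips exactly and exposes the
//  element ordering in its raw storage.
//
//      use bitvec::prelude::*;
//
//      let mut arr = BitArray::<[u8; 2], Lsb0>::ZERO;
//      arr.store_le::<u16>(0x1234);
//      assert_eq!(arr.load_le::<u16>(), 0x1234);
//      assert_eq!(arr.as_raw_slice(), &[0x34, 0x12]);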

#[cfg(feature = "alloc")]
#[cfg(not(tarpaulin_include))]
impl<T, O> BitField for BitBox<T, O>
where
	T: BitStore,
	O: BitOrder,
	BitSlice<T, O>: BitField,
{
	#[inline(always)]
	fn load_le<I>(&self) -> I
	where I: Integral {
		self.as_bitslice().load_le()
	}

	#[inline(always)]
	fn load_be<I>(&self) -> I
	where I: Integral {
		self.as_bitslice().load_be()
	}

	#[inline(always)]
	fn store_le<I>(&mut self, value: I)
	where I: Integral {
		self.as_mut_bitslice().store_le(value)
	}

	#[inline(always)]
	fn store_be<I>(&mut self, value: I)
	where I: Integral {
		self.as_mut_bitslice().store_be(value)
	}
}

#[cfg(feature = "alloc")]
#[cfg(not(tarpaulin_include))]
impl<T, O> BitField for BitVec<T, O>
where
	T: BitStore,
	O: BitOrder,
	BitSlice<T, O>: BitField,
{
	#[inline(always)]
	fn load_le<I>(&self) -> I
	where I: Integral {
		self.as_bitslice().load_le()
	}

	#[inline(always)]
	fn load_be<I>(&self) -> I
	where I: Integral {
		self.as_bitslice().load_be()
	}

	#[inline(always)]
	fn store_le<I>(&mut self, value: I)
	where I: Integral {
		self.as_mut_bitslice().store_le(value)
	}

	#[inline(always)]
	fn store_be<I>(&mut self, value: I)
	where I: Integral {
		self.as_mut_bitslice().store_be(value)
	}
}

/** Asserts that a bit-slice is no wider than the integer being transferred.

## Type Parameters

- `I`: The integer type being stored into or loaded out of a bit-slice.

## Parameters

- `action`: the verb being performed. One of `"load"` or `"store"`.
- `len`: the length of the bit-slice under test.

## Panics

This panics if `len` is not in `1 ..= bits_of::<I>()`.
**/
fn check<I>(action: &'static str, len: usize)
where I: Integral {
	assert!(
		(1 ..= bits_of::<I>()).contains(&len),
		"cannot {} {} bits from a {}-bit region",
		action,
		bits_of::<I>(),
		len,
	);
}
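
//  For example (hypothetical invocations, shown only to illustrate the
//  guard): with `I = u8` the accepted lengths are `1 ..= 8`, so
//  `check::<u8>("load", 8)` passes while `check::<u8>("load", 0)` and
//  `check::<u8>("load", 9)` both panic.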

/// Shifts a value to the left, if the type can support the shift amount.
///
/// Rust integers do not accept shifts equal to or greater than their bit
/// width, so such shifts are skipped entirely rather than overflowing.
fn maybe_shift_left<T: Integral>(elem: &mut T, shamt: usize) {
	if bits_of::<T>() > shamt {
		*elem <<= shamt;
	}
}

/// Shifts a value to the right, if the type can support the shift amount.
///
/// As with `maybe_shift_left`, a shift by the full bit width or more is
/// skipped rather than performing an overflowing shift.
fn maybe_shift_right<T: Integral>(elem: &mut T, shamt: usize) {
	if bits_of::<T>() > shamt {
		*elem >>= shamt;
	}
}

#[doc = include_str!("../doc/field/get.md")]
fn get<T, O, I>(elem: PartialElement<Const, T, O>, shamt: u8) -> I
where
	T: BitStore,
	O: BitOrder,
	I: Integral,
{
	resize::<T::Mem, I>(elem.load_value() >> shamt)
}

#[doc = include_str!("../doc/field/set.md")]
fn set<T, O, I>(mut elem: PartialElement<Mut, T, O>, value: I, shamt: u8)
where
	T: BitStore,
	O: BitOrder,
	I: Integral,
{
	elem.store_value(resize::<I, T::Mem>(value) << shamt);
}

#[doc = include_str!("../doc/field/sign.md")]
fn sign<I>(elem: I, width: usize) -> I
where I: Integral {
	if dvl::is_unsigned::<I>() {
		return elem;
	}
	//  Find the number of high bits that are not loaded.
	let shamt = bits_of::<I>() - width;
	//  Shift left, so that the highest loaded bit is now in the sign position.
	let shl: I = elem << shamt;
	//  Shift right with sign extension back to the original place.
	shl >> shamt
}
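
//  Worked example (illustrative): loading the three bits `0b101` as an `i8`
//  reaches here with `elem == 0b0000_0101` and `width == 3`. `shamt` is 5,
//  `elem << 5` is `0b1010_0000`, and the arithmetic shift back right yields
//  `0b1111_1101`, i.e. `-3`: the loaded value has been sign-extended.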

#[doc = include_str!("../doc/field/resize.md")]
fn resize<T, U>(value: T) -> U
where
	T: Integral,
	U: Integral,
{
	let mut out = U::ZERO;
	let size_t = mem::size_of::<T>();
	let size_u = mem::size_of::<U>();

	unsafe {
		resize_inner::<T, U>(&value, &mut out, size_t, size_u);
	}

	out
}
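
//  Illustrative behavior (assumed from the branches below, not a quoted
//  doc-test): `resize::<u32, u8>(0x1234_5678)` keeps only the least
//  significant byte, `0x78`, while `resize::<u8, u32>(0x78)` zero-extends to
//  `0x0000_0078`. Both the little-endian and big-endian implementations
//  preserve the low-order bytes and discard or zero the high-order bytes.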

/// Performs little-endian byte-order register resizing.
#[cfg(target_endian = "little")]
unsafe fn resize_inner<T, U>(
	src: &T,
	dst: &mut U,
	size_t: usize,
	size_u: usize,
) {
	//  In LE, the least-significant byte is the base address, so resizing is
	//  just a `memmove` into a zeroed slot, taking only the lesser width.
	ptr::copy_nonoverlapping(
		src as *const T as *const u8,
		dst as *mut U as *mut u8,
		size_t.min(size_u),
	);
}

/// Performs big-endian byte-order register resizing.
#[cfg(target_endian = "big")]
unsafe fn resize_inner<T, U>(
	src: &T,
	dst: &mut U,
	size_t: usize,
	size_u: usize,
) {
	let src = src as *const T as *const u8;
	let dst = dst as *mut U as *mut u8;

	//  In BE, shrinking a value requires moving the source base-pointer up in
	//  memory (to a higher address, lower significance),
	if size_t > size_u {
		ptr::copy_nonoverlapping(src.add(size_t - size_u), dst, size_u);
	}
	//  While expanding a value requires moving the *destination* base-pointer
	//  up (and leaving the lower address, higher significance bytes zeroed).
	else {
		ptr::copy_nonoverlapping(src, dst.add(size_u - size_t), size_t);
	}
}