// bitvec/slice/specialization/msb0.rs

//! Specializations for `BitSlice<_, Msb0>`.
2
3use core::iter;
4
5use funty::Integral;
6use wyz::{
7	bidi::BidiIterator,
8	range::RangeExt,
9};
10
11use super::{
12	has_one,
13	has_zero,
14	WORD_BITS,
15};
16use crate::{
17	domain::Domain,
18	field::BitField,
19	mem::bits_of,
20	order::Msb0,
21	slice::BitSlice,
22	store::BitStore,
23};
24
impl<T> BitSlice<T, Msb0>
where T: BitStore
{
	/// Accelerates Boolean arithmetic.
	///
	/// This applies a Boolean-arithmetic function across all the bits in a
	/// pair. The secondary bit-slice is zero-extended if it expires before
	/// `self` does.
	///
	/// Because the two bit-slices share the same types, this is able to
	/// batch-load `usize` chunks from each, apply the arithmetic to them, and
	/// write the result back into `self`. Any leftover bits are handled
	/// individually.
	///
	/// ## Parameters
	///
	/// - `&mut self`: the bit-slice receiving the result of the operation.
	/// - `rhs`: the second operand. If it is shorter than `self`, it is
	///   treated as if padded with `false` bits.
	/// - `word_op`: the operation applied to whole-`usize` batches.
	/// - `bool_op`: the same operation, applied to single bits in the
	///   remnant. It must agree bit-for-bit with `word_op`.
	pub(crate) fn sp_bitop_assign(
		&mut self,
		rhs: &Self,
		word_op: fn(usize, usize) -> usize,
		bool_op: fn(bool, bool) -> bool,
	) {
		let (mut this, mut that) = (self, rhs);
		//  Batched phase: while *both* operands still hold at least one full
		//  word of bits, pull a word from each, combine, and write back.
		while this.len() >= WORD_BITS && that.len() >= WORD_BITS {
			unsafe {
				// SAFETY: the loop condition guarantees `WORD_BITS` is in
				// bounds for both slices, and the `_noalias` split yields
				// disjoint subslices.
				let (l, left) = this.split_at_unchecked_mut_noalias(WORD_BITS);
				let (r, right) = that.split_at_unchecked(WORD_BITS);
				this = left;
				that = right;
				let (a, b) = (l.load_be::<usize>(), r.load_be::<usize>());
				l.store_be(word_op(a, b));
			}
		}
		//  Remnant phase: fewer than `WORD_BITS` bits remain in at least one
		//  operand. Walk the rest of `self` bit-by-bit, zero-extending `rhs`
		//  with `false` once it is exhausted.
		for (l, r) in this
			.as_mut_bitptr_range()
			.zip(that.iter().by_vals().chain(iter::repeat(false)))
		{
			unsafe {
				// SAFETY: `l` comes from `this`' own bit-pointer range, so it
				// is always a dereferenceable, in-bounds bit-pointer.
				l.write(bool_op(l.read(), r));
			}
		}
	}

	/// Accelerates copies between disjoint bit-slices with batch loads.
	///
	/// ## Panics
	///
	/// Panics if the two bit-slices have different lengths.
	pub(crate) fn sp_copy_from_bitslice(&mut self, src: &Self) {
		assert_eq!(
			self.len(),
			src.len(),
			"copying between bit-slices requires equal lengths",
		);

		//  Equal lengths mean the chunk sequences pair up exactly, including
		//  any equal-sized partial chunk at the end. `remove_alias` drops the
		//  aliasing marker; this is sound because each mutable chunk is
		//  yielded (and thus written) at most once.
		for (to, from) in unsafe { self.chunks_mut(WORD_BITS).remove_alias() }
			.zip(src.chunks(WORD_BITS))
		{
			to.store_be::<usize>(from.load_be::<usize>());
		}
	}

	/// Accelerates possibly-overlapping copies within a single bit-slice with
	/// batch loads.
	///
	/// ## Safety
	///
	/// The normalized `src` range, and the range `dest .. dest + src.len()`,
	/// must both lie within `self` — the interior `get_unchecked` calls do
	/// not bounds-check.
	pub(crate) unsafe fn sp_copy_within_unchecked(
		&mut self,
		src: impl RangeExt<usize>,
		dest: usize,
	) {
		let source = src.normalize(None, self.len());
		//  When the destination starts inside the source region, the copy
		//  must run back-to-front so that source chunks are read before the
		//  copy overwrites them.
		let rev = source.contains(&dest);
		let dest = dest .. dest + source.len();

		//  Drop to the alias-capable view so the two raw chunk sequences may
		//  legally observe the same memory.
		let this = self.as_accessor();
		let from = this
			.get_unchecked(source)
			.chunks(WORD_BITS)
			.map(|bits| bits as *const BitSlice<T::Access, Msb0>);
		let to = this.get_unchecked(dest).chunks(WORD_BITS).map(|bits| {
			bits as *const BitSlice<T::Access, Msb0>
				as *mut BitSlice<T::Access, Msb0>
		});
		//  `bidi(rev)` walks the paired chunks forwards or backwards as
		//  selected above.
		for (from, to) in from.zip(to).bidi(rev) {
			let value = (*from).load_be::<usize>();
			(*to).store_be::<usize>(value);
		}
	}

	/// Accelerates equality checking with batch loads.
	///
	/// Unequal lengths short-circuit to `false` before any memory is read;
	/// equal lengths guarantee each zipped chunk pair has matching widths.
	pub(crate) fn sp_eq(&self, other: &Self) -> bool {
		self.len() == other.len()
			&& self
				.chunks(WORD_BITS)
				.zip(other.chunks(WORD_BITS))
				.all(|(a, b)| a.load_be::<usize>() == b.load_be::<usize>())
	}

	/// Seeks the index of the first `1` bit in the bit-slice.
	///
	/// Returns `None` when the bit-slice is empty or contains no `1` bit.
	/// `accum` tracks how many live `0` bits precede the current search
	/// position. Partial elements load with their dead bits forced to zero,
	/// so in Msb0 ordering `leading_zeros` counts dead head bits plus live
	/// leading zeros; the head index is subtracted to keep only live bits.
	pub(crate) fn sp_first_one(&self) -> Option<usize> {
		let mut accum = 0;

		match self.domain() {
			//  The bit-slice is interior to a single element.
			Domain::Enclave(elem) => {
				let val = elem.load_value();
				accum += val.leading_zeros() as usize
					- elem.head().into_inner() as usize;
				//  `has_one` reports whether any bit under the live-bit mask
				//  is set; if not, the whole slice is zero.
				if has_one(val, elem.mask().into_inner()) {
					return Some(accum);
				}
				None
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = head {
					let val = elem.load_value();
					accum += val.leading_zeros() as usize
						- elem.head().into_inner() as usize;
					if has_one(val, elem.mask().into_inner()) {
						return Some(accum);
					}
					//  No hit: `val` was all-zero in its live bits, so
					//  `accum` has already advanced past the whole head.
				}

				//  Body elements are fully live, so the mask is all ones.
				for val in body.iter().map(BitStore::load_value) {
					accum += val.leading_zeros() as usize;
					if has_one(val, !<T::Mem as Integral>::ZERO) {
						return Some(accum);
					}
				}

				if let Some(elem) = tail {
					let val = elem.load_value();
					//  Tail live bits start at the MSedge, so no head offset
					//  needs subtracting here.
					accum += val.leading_zeros() as usize;
					if has_one(val, elem.mask().into_inner()) {
						return Some(accum);
					}
				}

				None
			},
		}
	}

	/// Seeks the index of the last `1` bit in the bit-slice.
	///
	/// Returns `None` when the bit-slice is empty or contains no `1` bit.
	/// `out` starts at the final index and walks backwards: each element's
	/// trailing zeros (less its dead tail bits, which also load as zero)
	/// are subtracted as the search passes over them.
	pub(crate) fn sp_last_one(&self) -> Option<usize> {
		//  An empty bit-slice has no last index at all.
		let mut out = self.len().checked_sub(1)?;
		match self.domain() {
			Domain::Enclave(elem) => {
				let val = elem.load_value();
				//  Dead bits below the tail cursor load as zero and inflate
				//  `trailing_zeros`; discount them.
				let dead_bits =
					bits_of::<T::Mem>() - elem.tail().into_inner() as usize;
				if has_one(val, elem.mask().into_inner()) {
					out -= val.trailing_zeros() as usize - dead_bits as usize;
					return Some(out);
				}
				None
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = tail {
					let val = elem.load_value();
					let dead_bits =
						bits_of::<T::Mem>() - elem.tail().into_inner() as usize;
					//  The subtraction runs *before* the hit test: on a miss
					//  (live bits all zero, `trailing_zeros` is the full
					//  word), it steps `out` back across every live tail bit,
					//  positioning it for the body loop.
					out -= val.trailing_zeros() as usize - dead_bits;
					if has_one(val, elem.mask().into_inner()) {
						return Some(out);
					}
				}

				//  Walk the fully-live body back-to-front; a miss subtracts a
				//  whole word's width from `out`.
				for val in body.iter().map(BitStore::load_value).rev() {
					out -= val.trailing_zeros() as usize;
					if has_one(val, !<T::Mem as Integral>::ZERO) {
						return Some(out);
					}
				}

				if let Some(elem) = head {
					let val = elem.load_value();
					//  Head live bits reach the LSedge, so `trailing_zeros`
					//  needs no dead-bit discount here.
					if has_one(val, elem.mask().into_inner()) {
						out -= val.trailing_zeros() as usize;
						return Some(out);
					}
				}

				None
			},
		}
	}

	/// Seeks the index of the first `0` bit in the bit-slice.
	///
	/// Mirror of [`sp_first_one`]: partial elements are loaded with their
	/// dead bits forced to `1` (`| !mask`) so that `leading_ones` counts
	/// dead head bits plus live leading ones, and `has_zero` tests only the
	/// live region.
	pub(crate) fn sp_first_zero(&self) -> Option<usize> {
		let mut accum = 0;

		match self.domain() {
			Domain::Enclave(elem) => {
				//  Force dead bits high so they never register as zeros.
				let val = elem.load_value() | !elem.mask().into_inner();
				accum += val.leading_ones() as usize
					- elem.head().into_inner() as usize;
				if has_zero(val, elem.mask().into_inner()) {
					return Some(accum);
				}
				None
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = head {
					let val = elem.load_value() | !elem.mask().into_inner();
					accum += val.leading_ones() as usize
						- elem.head().into_inner() as usize;
					if has_zero(val, elem.mask().into_inner()) {
						return Some(accum);
					}
				}

				//  Body elements are fully live: every bit participates.
				for val in body.iter().map(BitStore::load_value) {
					accum += val.leading_ones() as usize;
					if has_zero(val, !<T::Mem as Integral>::ZERO) {
						return Some(accum);
					}
				}

				if let Some(elem) = tail {
					let val = elem.load_value() | !elem.mask().into_inner();
					accum += val.leading_ones() as usize;
					if has_zero(val, elem.mask().into_inner()) {
						return Some(accum);
					}
				}

				None
			},
		}
	}

	/// Seeks the index of the last `0` bit in the bit-slice.
	///
	/// Mirror of [`sp_last_one`], with dead bits forced to `1` so that
	/// `trailing_ones` skips them, discounted by `dead_bits` where the tail
	/// cursor leaves dead bits below the live region.
	pub(crate) fn sp_last_zero(&self) -> Option<usize> {
		//  An empty bit-slice has no last index at all.
		let mut out = self.len().checked_sub(1)?;
		match self.domain() {
			Domain::Enclave(elem) => {
				let val = elem.load_value() | !elem.mask().into_inner();
				let dead_bits =
					bits_of::<T::Mem>() - elem.tail().into_inner() as usize;
				if has_zero(val, elem.mask().into_inner()) {
					out -= val.trailing_ones() as usize - dead_bits;
					return Some(out);
				}
				None
			},
			Domain::Region { head, body, tail } => {
				if let Some(elem) = tail {
					let val = elem.load_value() | !elem.mask().into_inner();
					let dead_bits =
						bits_of::<T::Mem>() - elem.tail().into_inner() as usize;
					//  As in `sp_last_one`: on a miss this steps `out` back
					//  across all live tail bits before the body loop runs.
					out -= val.trailing_ones() as usize - dead_bits;
					if has_zero(val, elem.mask().into_inner()) {
						return Some(out);
					}
				}

				for val in body.iter().map(BitStore::load_value).rev() {
					out -= val.trailing_ones() as usize;
					if has_zero(val, !<T::Mem as Integral>::ZERO) {
						return Some(out);
					}
				}

				if let Some(elem) = head {
					let val = elem.load_value() | !elem.mask().into_inner();
					if has_zero(val, elem.mask().into_inner()) {
						out -= val.trailing_ones() as usize;
						return Some(out);
					}
				}

				None
			},
		}
	}

	/// Accelerates swapping memory.
	///
	/// NOTE(review): no length assertion here — the zip stops at the shorter
	/// chunk sequence, so this presumably relies on callers having already
	/// matched the two lengths. Confirm at call sites.
	pub(crate) fn sp_swap_with_bitslice(&mut self, other: &mut Self) {
		//  `remove_alias` on both sides is sound because each mutable chunk
		//  of each slice is yielded at most once by its own iterator.
		for (this, that) in unsafe {
			self.chunks_mut(WORD_BITS)
				.remove_alias()
				.zip(other.chunks_mut(WORD_BITS).remove_alias())
		} {
			let (a, b) = (this.load_be::<usize>(), that.load_be::<usize>());
			this.store_be(b);
			that.store_be(a);
		}
	}
}