use core::convert::TryInto;
use core::convert::TryFrom;

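/// A zero-copy cursor over a borrowed byte slice, tracking a committed
/// `start` mark and a read `cursor` as raw pointers.
///
/// A rough usage sketch (marked `ignore` so it is not run as a doctest):
///
/// ```ignore
/// let mut bytes = Bytes::new(b"GET /");
/// assert_eq!(bytes.peek(), Some(b'G'));
/// // SAFETY: the buffer is 5 bytes long, so advancing by 3 stays in bounds.
/// unsafe { bytes.advance(3) };
/// assert_eq!(bytes.slice(), &b"GET"[..]);
/// ```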
#[allow(missing_docs)]
pub struct Bytes<'a> {
    start: *const u8,
    end: *const u8,
    // Invariant upheld by the (unsafe) API: start <= cursor <= end,
    // with all three pointing into the same borrowed slice.
    cursor: *const u8,
    phantom: core::marker::PhantomData<&'a ()>,
}

#[allow(missing_docs)]
impl<'a> Bytes<'a> {
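    /// Creates a new `Bytes` cursor over `slice`, with the cursor at the start.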
    #[inline]
    pub fn new(slice: &'a [u8]) -> Bytes<'a> {
        let start = slice.as_ptr();
        // SAFETY: `start + len` is one past the end of the slice, which is allowed.
        let end = unsafe { start.add(slice.len()) };
        let cursor = start;
        Bytes {
            start,
            end,
            cursor,
            phantom: core::marker::PhantomData,
        }
    }

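    /// Returns the cursor's offset from the current start mark, in bytes.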
    #[inline]
    pub fn pos(&self) -> usize {
        self.cursor as usize - self.start as usize
    }

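    /// Returns the byte at the cursor without advancing, or `None` if the
    /// cursor is at the end.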
    #[inline]
    pub fn peek(&self) -> Option<u8> {
        if self.cursor < self.end {
            // SAFETY: bounds checked above, so the cursor points at a valid byte.
            Some(unsafe { *self.cursor })
        } else {
            None
        }
    }

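    /// Returns the byte `n` positions ahead of the cursor without advancing,
    /// or `None` if that position is out of bounds.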
    #[inline]
    pub fn peek_ahead(&self, n: usize) -> Option<u8> {
        // `wrapping_add` avoids UB if `cursor + n` would leave the allocation;
        // the pointer is only dereferenced after the bounds check below.
        let ptr = self.cursor.wrapping_add(n);
        if ptr < self.end {
            // SAFETY: bounds checked above, so `ptr` points at a valid byte.
            Some(unsafe { *ptr })
        } else {
            None
        }
    }

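    /// Tries to convert the next `n` unconsumed bytes into `U` (typically a
    /// fixed-size array such as `[u8; 4]`) without advancing the cursor.
    /// Returns `None` if fewer than `n` bytes remain or the conversion fails.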
    #[inline]
    pub fn peek_n<'b: 'a, U: TryFrom<&'a [u8]>>(&'b self, n: usize) -> Option<U> {
        self.as_ref().get(..n)?.try_into().ok()
    }

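    /// Advances the cursor by exactly one byte; equivalent to `advance(1)`.
    ///
    /// # Safety
    ///
    /// The caller must ensure at least one unconsumed byte remains, i.e.
    /// `len() >= 1`.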
    #[inline]
    pub unsafe fn bump(&mut self) {
        self.advance(1)
    }

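    /// Advances the cursor by `n` bytes.
    ///
    /// # Safety
    ///
    /// The caller must ensure that at least `n` unconsumed bytes remain, i.e.
    /// `n <= len()`, so the cursor stays within bounds.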
    #[inline]
    pub unsafe fn advance(&mut self, n: usize) {
        self.cursor = self.cursor.add(n);
        debug_assert!(self.cursor <= self.end, "overflow");
    }

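    /// Returns the number of unconsumed bytes remaining (from the cursor to the end).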
    #[inline]
    pub fn len(&self) -> usize {
        self.end as usize - self.cursor as usize
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

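    /// Returns the bytes between the committed start and the cursor, then
    /// commits so that the cursor becomes the new start.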
    #[inline]
    pub fn slice(&mut self) -> &'a [u8] {
        // SAFETY: `start <= cursor` and both point into the original slice,
        // so the range forms a valid subslice.
        let slice = unsafe { slice_from_ptr_range(self.start, self.cursor) };
        self.commit();
        slice
    }

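    /// Like `slice()`, but excludes the last `skip` bytes before the cursor
    /// from the returned slice. The cursor position is still committed.
    ///
    /// # Safety
    ///
    /// The caller must ensure that at least `skip` bytes have been advanced
    /// since the last commit, i.e. `skip <= pos()`.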
    #[inline]
    pub unsafe fn slice_skip(&mut self, skip: usize) -> &'a [u8] {
        debug_assert!(self.cursor.sub(skip) >= self.start);
        let head = slice_from_ptr_range(self.start, self.cursor.sub(skip));
        self.commit();
        head
    }

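    /// Marks everything before the cursor as consumed by moving the start
    /// mark up to the cursor.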
    #[inline]
    pub fn commit(&mut self) {
        self.start = self.cursor
    }

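    /// Advances the cursor by `n` bytes and commits in one step.
    ///
    /// # Safety
    ///
    /// Same requirement as `advance`: `n` must not exceed `len()`.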
    #[inline]
    pub unsafe fn advance_and_commit(&mut self, n: usize) {
        self.advance(n);
        self.commit();
    }

    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.cursor
    }

    #[inline]
    pub fn start(&self) -> *const u8 {
        self.start
    }

    #[inline]
    pub fn end(&self) -> *const u8 {
        self.end
    }

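    /// Moves the cursor to `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must lie within the current `start..=end` range of this buffer.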
    #[inline]
    pub unsafe fn set_cursor(&mut self, ptr: *const u8) {
        debug_assert!(ptr >= self.start);
        debug_assert!(ptr <= self.end);
        self.cursor = ptr;
    }
}

impl<'a> AsRef<[u8]> for Bytes<'a> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        // SAFETY: `cursor <= end` and both point into the original slice.
        unsafe { slice_from_ptr_range(self.cursor, self.end) }
    }
}

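/// Builds a byte slice from a pair of raw pointers.
///
/// # Safety
///
/// `start` and `end` must point into the same allocated object, with
/// `start <= end`, and the bytes in between must be valid for reads for the
/// lifetime `'a`.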
#[inline]
unsafe fn slice_from_ptr_range<'a>(start: *const u8, end: *const u8) -> &'a [u8] {
    debug_assert!(start <= end);
    core::slice::from_raw_parts(start, end as usize - start as usize)
}

impl<'a> Iterator for Bytes<'a> {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<u8> {
        if self.cursor < self.end {
            // SAFETY: bounds checked above, so reading one byte and bumping
            // the cursor by one stays in range.
            unsafe {
                let b = *self.cursor;
                self.bump();
                Some(b)
            }
        } else {
            None
        }
    }
}