use crate::iter::Bytes;

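/// Advances `bytes` past consecutive URI characters, 16 bytes at a time,
/// stopping at the first byte that is not a valid URI character.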
#[target_feature(enable = "sse4.2")]
pub unsafe fn match_uri_vectored(bytes: &mut Bytes) {
    while bytes.as_ref().len() >= 16 {
        let advance = match_url_char_16_sse(bytes.as_ref());
        bytes.advance(advance);

        if advance != 16 {
            return;
        }
    }
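    // Fewer than 16 bytes remain; finish with the scalar SWAR fallback.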
    super::swar::match_uri_vectored(bytes);
}

#[inline(always)]
#[allow(non_snake_case, overflowing_literals)]
unsafe fn match_url_char_16_sse(buf: &[u8]) -> usize {
    debug_assert!(buf.len() >= 16);

    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    use core::arch::x86_64::*;

    let ptr = buf.as_ptr();

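    // 0x0f mask; there is no 8-bit SIMD shift, so the 16-bit shift below leaks
    // bits across byte lanes and this mask cleans them up.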
    let LSH: __m128i = _mm_set1_epi8(0x0f);

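    // Nibble-indexed bitmap of allowed URI characters: URI[low nibble] has
    // bit `h` set when the byte with that low nibble and high nibble `h` is
    // an allowed URI character.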
    let URI: __m128i = _mm_setr_epi8(
        0xf8, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc,
        0xfc, 0xfc, 0xfc, 0xfc, 0xf4, 0xfc, 0xf4, 0x7c,
    );
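    // ARF[high nibble] selects the single bit to test in the URI row; high
    // nibbles 8..15 map to zero, so bytes >= 0x80 are always rejected.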
    let ARF: __m128i = _mm_setr_epi8(
        0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    );

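    // For each byte: look up its row bitmask by low nibble (_mm_shuffle_epi8
    // also zeroes lanes whose top bit is set), isolate the high nibble, pick
    // the matching column bit from ARF, and AND the two. A non-zero lane means
    // the byte is an allowed URI character.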
    let data = _mm_lddqu_si128(ptr as *const _);
    let rbms = _mm_shuffle_epi8(URI, data);
    let cols = _mm_and_si128(LSH, _mm_srli_epi16(data, 4));
    let bits = _mm_and_si128(_mm_shuffle_epi8(ARF, cols), rbms);

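    // Disallowed lanes are zero in `bits`, so they compare equal to zero and
    // show up as set bits in the movemask; trailing_zeros then gives the index
    // of the first disallowed byte, or 16 when every byte is allowed (the
    // trailing_zeros of a zero u16 is 16).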
    let v = _mm_cmpeq_epi8(bits, _mm_setzero_si128());
    let r = _mm_movemask_epi8(v) as u16;

    r.trailing_zeros() as usize
}

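/// Advances `bytes` past consecutive header-value characters, 16 bytes at a
/// time, stopping at the first byte that is not valid in a header value.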
#[target_feature(enable = "sse4.2")]
pub unsafe fn match_header_value_vectored(bytes: &mut Bytes) {
    while bytes.as_ref().len() >= 16 {
        let advance = match_header_value_char_16_sse(bytes.as_ref());
        bytes.advance(advance);

        if advance != 16 {
            return;
        }
    }
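    // Fewer than 16 bytes remain; finish with the scalar SWAR fallback.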
    super::swar::match_header_value_vectored(bytes);
}

#[inline(always)]
#[allow(non_snake_case)]
unsafe fn match_header_value_char_16_sse(buf: &[u8]) -> usize {
    debug_assert!(buf.len() >= 16);

    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    use core::arch::x86_64::*;

    let ptr = buf.as_ptr();

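    // Header values accept horizontal tab and every byte from 0x20 upward
    // (including obs-text >= 0x80), with DEL (0x7f) excluded.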
    let TAB: __m128i = _mm_set1_epi8(0x09);
    let DEL: __m128i = _mm_set1_epi8(0x7f);
    let LOW: __m128i = _mm_set1_epi8(0x20);

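    // `low` is an unsigned `dat >= 0x20` test: max_epu8(dat, LOW) equals dat
    // exactly when dat >= LOW. A byte is allowed if (low or tab) and not del.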
    let dat = _mm_lddqu_si128(ptr as *const _);
    let low = _mm_cmpeq_epi8(_mm_max_epu8(dat, LOW), dat);
    let tab = _mm_cmpeq_epi8(dat, TAB);
    let del = _mm_cmpeq_epi8(dat, DEL);
    let bit = _mm_andnot_si128(del, _mm_or_si128(low, tab));
    let res = _mm_movemask_epi8(bit) as u16;

    // `res` has a set bit per allowed byte; inverting it and counting trailing
    // zeros yields the index of the first disallowed byte, or 16 if all 16 are allowed.
    (!res).trailing_zeros() as usize
}

#[test]
fn sse_code_matches_uri_chars_table() {
    if !is_x86_feature_detected!("sse4.2") {
        return;
    }

    #[allow(clippy::undocumented_unsafe_blocks)]
    unsafe {
        assert!(byte_is_allowed(b'_', match_uri_vectored));

        for (b, allowed) in crate::URI_MAP.iter().cloned().enumerate() {
            assert_eq!(
                byte_is_allowed(b as u8, match_uri_vectored), allowed,
                "byte_is_allowed({:?}) should be {:?}", b, allowed,
            );
        }
    }
}

#[test]
fn sse_code_matches_header_value_chars_table() {
    if !is_x86_feature_detected!("sse4.2") {
        return;
    }

    #[allow(clippy::undocumented_unsafe_blocks)]
    unsafe {
        assert!(byte_is_allowed(b'_', match_header_value_vectored));

        for (b, allowed) in crate::HEADER_VALUE_MAP.iter().cloned().enumerate() {
            assert_eq!(
                byte_is_allowed(b as u8, match_header_value_vectored), allowed,
                "byte_is_allowed({:?}) should be {:?}", b, allowed,
            );
        }
    }
}

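// Runs `f` over a 16-byte slice with `byte` placed at index 10: a final
// position of 16 means the byte was accepted, 10 means matching stopped on it.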
#[allow(clippy::missing_safety_doc)]
#[cfg(test)]
unsafe fn byte_is_allowed(byte: u8, f: unsafe fn(bytes: &mut Bytes<'_>)) -> bool {
    let slice = [
        b'_', b'_', b'_', b'_',
        b'_', b'_', b'_', b'_',
        b'_', b'_', byte, b'_',
        b'_', b'_', b'_', b'_',
    ];
    let mut bytes = Bytes::new(&slice);

    f(&mut bytes);

    match bytes.pos() {
        16 => true,
        10 => false,
        _ => unreachable!(),
    }
}