use crate::iter::Bytes;

pub enum Scan {
    /// Returned when an implementation finds a noteworthy token.
    Found,
    /// Returned when an implementation couldn't keep running because the input was too short.
    TooShort,
}

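/// Advances `bytes` over 32-byte chunks of URI characters using AVX2.
///
/// Returns `Scan::Found` as soon as a chunk contains a byte that is not a
/// valid URI character (the cursor is left on that byte), and
/// `Scan::TooShort` once fewer than 32 bytes remain to scan.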
pub unsafe fn parse_uri_batch_32(bytes: &mut Bytes) -> Scan {
    while bytes.as_ref().len() >= 32 {
        let advance = match_url_char_32_avx(bytes.as_ref());
        bytes.advance(advance);

        if advance != 32 {
            return Scan::Found;
        }
    }
    Scan::TooShort
}

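/// Returns how many of the first 32 bytes of `buf` are valid URI characters,
/// i.e. the index of the first disallowed byte, or 32 if all of them match.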
#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "avx2")]
#[inline]
#[allow(non_snake_case, overflowing_literals)]
unsafe fn match_url_char_32_avx(buf: &[u8]) -> usize {
    debug_assert!(buf.len() >= 32);

    /*
    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    */
    use core::arch::x86_64::*;

    let ptr = buf.as_ptr();

    let LSH: __m256i = _mm256_set1_epi8(0x0f);

    // See comment in sse42::match_url_char_16_sse.

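    // The lookup works on nibbles: `URI` is indexed by the low nibble of
    // each input byte (via `pshufb`) and yields a row bitmask of which high
    // nibbles (0..=7) are allowed for that low nibble, while `ARF` maps the
    // high nibble to the single bit `1 << high_nibble` (and to zero for
    // nibbles 8..=15, so bytes >= 0x80 never match). A byte is a valid URI
    // character exactly when its `ARF` bit is present in its `URI` row.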
    let URI: __m256i = _mm256_setr_epi8(
        0xf8, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc,
        0xfc, 0xfc, 0xfc, 0xfc, 0xf4, 0xfc, 0xf4, 0x7c,
        0xf8, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc, 0xfc,
        0xfc, 0xfc, 0xfc, 0xfc, 0xf4, 0xfc, 0xf4, 0x7c,
    );
    let ARF: __m256i = _mm256_setr_epi8(
        0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    );

    let data = _mm256_lddqu_si256(ptr as *const _);
    let rbms = _mm256_shuffle_epi8(URI, data);
    let cols = _mm256_and_si256(LSH, _mm256_srli_epi16(data, 4));
    let bits = _mm256_and_si256(_mm256_shuffle_epi8(ARF, cols), rbms);

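    // `v` has 0xff in every lane whose byte was rejected; `movemask` packs
    // those lanes into the low 32 bits of `r`. Forcing the upper 32 bits to
    // one makes `tzcnt` return 32 when every byte matched, and otherwise the
    // index of the first rejected byte.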
    let v = _mm256_cmpeq_epi8(bits, _mm256_setzero_si256());
    let r = 0xffff_ffff_0000_0000 | _mm256_movemask_epi8(v) as u64;

    _tzcnt_u64(r) as usize
}

#[cfg(target_arch = "x86")]
unsafe fn match_url_char_32_avx(_: &[u8]) -> usize {
    unreachable!("AVX2 detection should be disabled for x86");
}

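/// Advances `bytes` over 32-byte chunks of header-value characters using
/// AVX2, with the same return semantics as `parse_uri_batch_32`.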
pub unsafe fn match_header_value_batch_32(bytes: &mut Bytes) -> Scan {
    while bytes.as_ref().len() >= 32 {
        let advance = match_header_value_char_32_avx(bytes.as_ref());
        bytes.advance(advance);

        if advance != 32 {
            return Scan::Found;
        }
    }
    Scan::TooShort
}

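/// Returns how many of the first 32 bytes of `buf` are valid header-value
/// characters (HTAB, %x20-7E, or %x80-FF), i.e. the index of the first
/// disallowed byte, or 32 if all of them match.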
#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "avx2")]
#[inline]
#[allow(non_snake_case)]
unsafe fn match_header_value_char_32_avx(buf: &[u8]) -> usize {
    debug_assert!(buf.len() >= 32);

    /*
    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    */
    use core::arch::x86_64::*;

    let ptr = buf.as_ptr();

    // %x09 %x20-%x7e %x80-%xff
    let TAB: __m256i = _mm256_set1_epi8(0x09);
    let DEL: __m256i = _mm256_set1_epi8(0x7f);
    let LOW: __m256i = _mm256_set1_epi8(0x20);

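    // A byte is allowed when it is >= 0x20 unsigned (which also covers the
    // %x80-%xff obs-text range) or equal to HTAB, and is not DEL (0x7f).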
    let dat = _mm256_lddqu_si256(ptr as *const _);
    // unsigned comparison dat >= LOW
    let low = _mm256_cmpeq_epi8(_mm256_max_epu8(dat, LOW), dat);
    let tab = _mm256_cmpeq_epi8(dat, TAB);
    let del = _mm256_cmpeq_epi8(dat, DEL);
    let bit = _mm256_andnot_si256(del, _mm256_or_si256(low, tab));
    let rev = _mm256_cmpeq_epi8(bit, _mm256_setzero_si256());
    let res = 0xffff_ffff_0000_0000 | _mm256_movemask_epi8(rev) as u64;

    _tzcnt_u64(res) as usize
}

#[cfg(target_arch = "x86")]
unsafe fn match_header_value_char_32_avx(_: &[u8]) -> usize {
    unreachable!("AVX2 detection should be disabled for x86");
}

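// The tests below check the AVX2 matchers byte-for-byte against the scalar
// lookup tables; they are skipped when runtime detection does not report AVX2.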
#[test]
fn avx2_code_matches_uri_chars_table() {
    match super::detect() {
        super::AVX_2 | super::AVX_2_AND_SSE_42 => {},
        _ => return,
    }

    unsafe {
        assert!(byte_is_allowed(b'_', parse_uri_batch_32));

        for (b, allowed) in crate::URI_MAP.iter().cloned().enumerate() {
            assert_eq!(
                byte_is_allowed(b as u8, parse_uri_batch_32), allowed,
                "byte_is_allowed({:?}) should be {:?}", b, allowed,
            );
        }
    }
}

#[test]
fn avx2_code_matches_header_value_chars_table() {
    match super::detect() {
        super::AVX_2 | super::AVX_2_AND_SSE_42 => {},
        _ => return,
    }

    unsafe {
        assert!(byte_is_allowed(b'_', match_header_value_batch_32));

        for (b, allowed) in crate::HEADER_VALUE_MAP.iter().cloned().enumerate() {
            assert_eq!(
                byte_is_allowed(b as u8, match_header_value_batch_32), allowed,
                "byte_is_allowed({:?}) should be {:?}", b, allowed,
            );
        }
    }
}

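// Probes a single byte by placing it at index 26 of a 32-byte block otherwise
// filled with b'_' (always allowed). If the scanner consumes the whole block
// (`pos() == 32`) the byte is allowed; if it stops at index 26, it is not.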
#[cfg(test)]
unsafe fn byte_is_allowed(byte: u8, f: unsafe fn(bytes: &mut Bytes<'_>) -> Scan) -> bool {
    let slice = [
        b'_', b'_', b'_', b'_',
        b'_', b'_', b'_', b'_',
        b'_', b'_', b'_', b'_',
        b'_', b'_', b'_', b'_',
        b'_', b'_', b'_', b'_',
        b'_', b'_', b'_', b'_',
        b'_', b'_', byte, b'_',
        b'_', b'_', b'_', b'_',
    ];
    let mut bytes = Bytes::new(&slice);

    f(&mut bytes);

    match bytes.pos() {
        32 => true,
        26 => false,
        _ => unreachable!(),
    }
}