use core::convert::TryFrom;
use core::convert::TryInto;

#[allow(missing_docs)]
pub struct Bytes<'a> {
    start: *const u8,
    end: *const u8,
    /// INVARIANT: start <= cursor && cursor <= end
    cursor: *const u8,
    phantom: core::marker::PhantomData<&'a ()>,
}

#[allow(missing_docs)]
impl<'a> Bytes<'a> {
    #[inline]
    pub fn new(slice: &'a [u8]) -> Bytes<'a> {
        let start = slice.as_ptr();
        // SAFETY: obtain pointer to slice end; start points to slice start.
        let end = unsafe { start.add(slice.len()) };
        let cursor = start;
        Bytes {
            start,
            end,
            cursor,
            phantom: core::marker::PhantomData,
        }
    }

    #[inline]
    pub fn pos(&self) -> usize {
        self.cursor as usize - self.start as usize
    }

    #[inline]
    pub fn peek(&self) -> Option<u8> {
        if self.cursor < self.end {
            // SAFETY: bounds checked
            Some(unsafe { *self.cursor })
        } else {
            None
        }
    }

    /// Peek at byte `n` ahead of cursor
    ///
    /// # Safety
    ///
    /// Caller must ensure that `n <= self.len()`, otherwise `self.cursor.add(n)` is UB.
    /// That means there are at least `n` bytes between `self.cursor` and `self.end`,
    /// so `self.cursor.add(n)` is either `self.end` or points to a valid byte.
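    ///
    /// # Example
    ///
    /// Illustrative sketch (not part of the original documentation); the input
    /// byte string is an arbitrary example:
    ///
    /// ```ignore
    /// let bytes = Bytes::new(b"GET /");
    /// // SAFETY: 1 <= bytes.len(), so the read stays in bounds.
    /// assert_eq!(unsafe { bytes.peek_ahead(1) }, Some(b'E'));
    /// // The cursor has not moved.
    /// assert_eq!(bytes.peek(), Some(b'G'));
    /// ```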
    #[inline]
    pub unsafe fn peek_ahead(&self, n: usize) -> Option<u8> {
        debug_assert!(n <= self.len());
        // SAFETY: by preconditions
        let p = unsafe { self.cursor.add(n) };
        if p < self.end {
            // SAFETY: by preconditions, if this is not `self.end`,
            // then it is safe to dereference
            Some(unsafe { *p })
        } else {
            None
        }
    }

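    /// Peek at the next `n` bytes without advancing the cursor, converting them
    /// via `U: TryFrom<&[u8]>`; returns `None` if fewer than `n` bytes remain or
    /// the conversion fails.
    ///
    /// # Example
    ///
    /// Illustrative sketch (not part of the original documentation):
    ///
    /// ```ignore
    /// let bytes = Bytes::new(b"HTTP/1.1");
    /// // Read the first four bytes into a fixed-size array.
    /// let head: Option<[u8; 4]> = bytes.peek_n(4);
    /// assert_eq!(head, Some(*b"HTTP"));
    /// ```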
    #[inline]
    pub fn peek_n<'b: 'a, U: TryFrom<&'a [u8]>>(&'b self, n: usize) -> Option<U> {
        // TODO: once we bump MSRV, use const generics to allow only [u8; N] reads
        // TODO: drop `n` arg in favour of const
        // let n = core::mem::size_of::<U>();
        self.as_ref().get(..n)?.try_into().ok()
    }

    /// Advance by 1, equivalent to calling `advance(1)`.
    ///
    /// # Safety
    ///
    /// Caller must ensure that at least one byte remains past the cursor
    /// (i.e. `!self.is_empty()`); otherwise the cursor would move past `self.end`, which is UB.
    #[inline]
    pub unsafe fn bump(&mut self) {
        self.advance(1)
    }

    /// Advance cursor by `n`
    ///
    /// # Safety
    ///
    /// Caller must ensure that `n <= self.len()`, so the cursor does not move past `self.end`.
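    ///
    /// # Example
    ///
    /// Illustrative sketch (not part of the original documentation):
    ///
    /// ```ignore
    /// let mut bytes = Bytes::new(b"HTTP/1.1");
    /// if bytes.len() >= 5 {
    ///     // SAFETY: 5 <= bytes.len(), checked just above.
    ///     unsafe { bytes.advance(5) };
    /// }
    /// assert_eq!(bytes.peek(), Some(b'1'));
    /// ```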
    #[inline]
    pub unsafe fn advance(&mut self, n: usize) {
        self.cursor = self.cursor.add(n);
        debug_assert!(self.cursor <= self.end, "overflow");
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.end as usize - self.cursor as usize
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

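    /// Return the bytes between the last committed position (`start`) and the
    /// current cursor, then commit so the next slice starts at the cursor.
    ///
    /// # Example
    ///
    /// Illustrative sketch (not part of the original documentation):
    ///
    /// ```ignore
    /// let mut bytes = Bytes::new(b"GET /index");
    /// // SAFETY: 3 <= bytes.len().
    /// unsafe { bytes.advance(3) };
    /// assert_eq!(bytes.slice(), b"GET");
    /// // The consumed prefix is committed; the remaining view starts at the space.
    /// assert_eq!(bytes.as_ref(), b" /index");
    /// ```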
    #[inline]
    pub fn slice(&mut self) -> &'a [u8] {
        // SAFETY: not moving position at all, so it's safe
        let slice = unsafe { slice_from_ptr_range(self.start, self.cursor) };
        self.commit();
        slice
    }

    // TODO: this is an anti-pattern, should be removed
    /// Deprecated. Do not use!
    /// # Safety
    ///
    /// Caller must ensure that `skip` is at most the number of advances (i.e., `bytes.advance(3)`
    /// implies a skip of at most 3).
    #[inline]
    pub unsafe fn slice_skip(&mut self, skip: usize) -> &'a [u8] {
        debug_assert!(skip <= self.cursor.offset_from(self.start) as usize);
        // SAFETY: by the precondition, `cursor - skip` stays within `start..=cursor`,
        // so both pointers lie in the same allocation.
        let head = slice_from_ptr_range(self.start, self.cursor.sub(skip));
        self.commit();
        head
    }

    #[inline]
    pub fn commit(&mut self) {
        self.start = self.cursor
    }

    /// # Safety
    ///
    /// See [`Bytes::advance`] safety comment.
    #[inline]
    pub unsafe fn advance_and_commit(&mut self, n: usize) {
        self.advance(n);
        self.commit();
    }

    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.cursor
    }

    #[inline]
    pub fn start(&self) -> *const u8 {
        self.start
    }

    #[inline]
    pub fn end(&self) -> *const u8 {
        self.end
    }

    /// # Safety
    ///
    /// Must ensure invariant `bytes.start() <= ptr && ptr <= bytes.end()`.
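    ///
    /// # Example
    ///
    /// Illustrative sketch (not part of the original documentation): save the
    /// cursor, scan ahead, then roll back.
    ///
    /// ```ignore
    /// let mut bytes = Bytes::new(b"abc");
    /// let saved = bytes.as_ptr();
    /// // SAFETY: 2 <= bytes.len().
    /// unsafe { bytes.advance(2) };
    /// // SAFETY: `saved` came from this buffer and lies between start and end.
    /// unsafe { bytes.set_cursor(saved) };
    /// assert_eq!(bytes.peek(), Some(b'a'));
    /// ```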
    #[inline]
    pub unsafe fn set_cursor(&mut self, ptr: *const u8) {
        debug_assert!(ptr >= self.start);
        debug_assert!(ptr <= self.end);
        self.cursor = ptr;
    }
}

impl AsRef<[u8]> for Bytes<'_> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        // SAFETY: not moving position at all, so it's safe
        unsafe { slice_from_ptr_range(self.cursor, self.end) }
    }
}

/// # Safety
///
/// Must ensure start and end point to the same memory object to uphold memory safety.
#[inline]
unsafe fn slice_from_ptr_range<'a>(start: *const u8, end: *const u8) -> &'a [u8] {
    debug_assert!(start <= end);
    core::slice::from_raw_parts(start, end as usize - start as usize)
}

impl Iterator for Bytes<'_> {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<u8> {
        if self.cursor < self.end {
            // SAFETY: bounds checked dereference
            unsafe {
                let b: u8 = *self.cursor;
                self.bump();
                Some(b)
            }
        } else {
            None
        }
    }
}
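
// Illustrative usage sketch, not part of the original module: a minimal test
// exercising the API above (construction, peeking, advancing, slicing with
// commit, and iteration). Module and test names are placeholders.
#[cfg(test)]
mod usage_sketch {
    use super::Bytes;

    #[test]
    fn parse_like_walkthrough() {
        let mut bytes = Bytes::new(b"GET /");

        // Peek without moving the cursor.
        assert_eq!(bytes.peek(), Some(b'G'));
        assert_eq!(bytes.pos(), 0);

        // Advance over "GET" and take the committed slice.
        // SAFETY: 3 <= bytes.len().
        unsafe { bytes.advance(3) };
        assert_eq!(bytes.slice(), b"GET");

        // After the commit, the remaining view starts at the space.
        assert_eq!(bytes.as_ref(), b" /");
        assert_eq!(bytes.len(), 2);

        // The iterator yields the remaining bytes one at a time.
        assert_eq!(bytes.next(), Some(b' '));
        assert_eq!(bytes.next(), Some(b'/'));
        assert_eq!(bytes.next(), None);
    }
}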