heapless_bytes/lib.rs

//! # heapless-bytes
//!
//! Newtype around heapless byte Vec with efficient serde.
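//!
//! The `Serialize` implementation forwards to `Serializer::serialize_bytes`, so formats
//! with a native byte-string representation can encode the contents compactly.
//!
//! A minimal usage sketch:
//!
//! ```
//! use heapless_bytes::Bytes;
//!
//! let mut bytes: Bytes<8> = Bytes::new();
//! bytes.extend_from_slice(b"hello").unwrap();
//! bytes.push(b'!').unwrap();
//! assert_eq!(&*bytes, b"hello!");
//! ```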

#![cfg_attr(not(test), no_std)]
#![allow(clippy::result_unit_err)]

use core::{
    cmp::Ordering,
    fmt::{self, Debug},
    hash::{Hash, Hasher},
    ops::{Deref, DerefMut},
};

use heapless::Vec;

use serde::{
    de::{Deserialize, Deserializer, Visitor},
    ser::{Serialize, Serializer},
};

#[derive(Clone, Default, Eq, Ord)]
pub struct Bytes<const N: usize> {
    bytes: Vec<u8, N>,
}

pub type Bytes8 = Bytes<8>;
pub type Bytes16 = Bytes<16>;
pub type Bytes32 = Bytes<32>;
pub type Bytes64 = Bytes<64>;

#[cfg(feature = "heapless-0.8")]
impl<const N: usize, const M: usize> From<Vec<u8, M>> for Bytes<N> {
    fn from(vec: Vec<u8, M>) -> Self {
        Bytes { bytes: vec }.increase_capacity()
    }
}

#[cfg(feature = "heapless-0.8")]
impl<const N: usize, const M: usize> From<Bytes<M>> for Vec<u8, N> {
    fn from(value: Bytes<M>) -> Self {
        value.increase_capacity().bytes
    }
}

impl<const N: usize> TryFrom<&[u8]> for Bytes<N> {
    type Error = ();
    fn try_from(value: &[u8]) -> Result<Self, ()> {
        Ok(Self {
            bytes: Vec::from_slice(value)?,
        })
    }
}

impl<const N: usize> Bytes<N> {
    /// Construct a new, empty `Bytes<N>`.
    pub fn new() -> Self {
        Self { bytes: Vec::new() }
    }

    /// Returns a raw pointer to the buffer.
    pub fn as_ptr(&self) -> *const u8 {
        self.bytes.as_ptr()
    }

    /// Returns a raw pointer to the vector’s buffer, which may be mutated
    /// through the returned pointer.
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.bytes.as_mut_ptr()
    }

    /// Extracts a slice containing the entire buffer.
    pub fn as_slice(&self) -> &[u8] {
        self.bytes.as_slice()
    }

    /// Extracts a mutable slice containing the entire buffer.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.bytes.as_mut_slice()
    }

    /// Get the capacity of the buffer.
    ///
    /// Always equal to the `N` const generic.
    pub const fn capacity(&self) -> usize {
        self.bytes.capacity()
    }

    /// Clear the buffer, making it empty.
    pub fn clear(&mut self) {
        self.bytes.clear()
    }

    /// Extends the buffer from an iterator.
    ///
    /// # Panics
    ///
    /// Panics if the buffer cannot hold all elements of the iterator.
    #[deprecated(
        since = "0.4.0",
        note = "Panics when out of capacity, use try_extend instead"
    )]
    pub fn extend<I: IntoIterator<Item = u8>>(&mut self, iter: I) {
        self.bytes.extend(iter)
    }

    /// Extends the buffer from an iterator.
    ///
    /// Returns [`Err`] if out of capacity.
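    ///
    /// A short usage sketch:
    ///
    /// ```
    /// # use heapless_bytes::Bytes;
    /// let mut bytes: Bytes<4> = Bytes::new();
    /// assert!(bytes.try_extend([1, 2, 3]).is_ok());
    /// // Extending by two more bytes fails, but the byte that still fit is appended.
    /// assert!(bytes.try_extend([4, 5]).is_err());
    /// assert_eq!(&*bytes, &[1, 2, 3, 4]);
    /// ```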
    pub fn try_extend<I: IntoIterator<Item = u8>>(&mut self, iter: I) -> Result<(), ()> {
        for b in iter {
            self.push(b)?;
        }
        Ok(())
    }

    /// Extend the buffer with the contents of a slice
    pub fn extend_from_slice(&mut self, other: &[u8]) -> Result<(), ()> {
        self.bytes.extend_from_slice(other)
    }

    /// Removes the last byte from the buffer and returns it, or `None` if it's empty
    pub fn pop(&mut self) -> Option<u8> {
        self.bytes.pop()
    }

    /// Appends a byte to the back of the collection
    pub fn push(&mut self, byte: u8) -> Result<(), ()> {
        self.bytes.push(byte).map_err(drop)
    }

    /// Removes the last byte from the buffer and returns it
    ///
    /// # Safety
    ///
    /// This assumes the buffer to have at least one element.
    pub unsafe fn pop_unchecked(&mut self) -> u8 {
        unsafe { self.bytes.pop_unchecked() }
    }

    /// Appends a byte to the back of the buffer
    ///
    /// # Safety
    ///
    /// This assumes the buffer is not full.
    pub unsafe fn push_unchecked(&mut self, byte: u8) {
        unsafe {
            self.bytes.push_unchecked(byte);
        }
    }

    /// Shortens the buffer, keeping the first `len` elements and dropping the rest.
    pub fn truncate(&mut self, len: usize) {
        self.bytes.truncate(len)
    }

    /// Resizes the buffer in-place so that `len` is equal to `new_len`.
    ///
    /// If `new_len` is greater than `len`, the buffer is extended by the
    /// difference, with each additional slot filled with `value`. If
    /// `new_len` is less than `len`, the buffer is simply truncated.
    ///
    /// See also [`resize_zero`](Self::resize_zero).
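    ///
    /// A short usage sketch:
    ///
    /// ```
    /// # use heapless_bytes::Bytes;
    /// let mut bytes: Bytes<4> = Bytes::new();
    /// bytes.resize(3, 0xff).unwrap();
    /// assert_eq!(&*bytes, &[0xff, 0xff, 0xff]);
    /// bytes.resize(1, 0).unwrap();
    /// assert_eq!(&*bytes, &[0xff]);
    /// // Growing beyond the capacity `N` fails.
    /// assert!(bytes.resize(5, 0).is_err());
    /// ```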
    pub fn resize(&mut self, new_len: usize, value: u8) -> Result<(), ()> {
        self.bytes.resize(new_len, value)
    }

    /// Resizes the buffer in-place so that `len` is equal to `new_len`.
    ///
    /// If `new_len` is greater than `len`, the buffer is extended by the
    /// difference, with each additional slot filled with `0`. If
    /// `new_len` is less than `len`, the buffer is simply truncated.
    pub fn resize_zero(&mut self, new_len: usize) -> Result<(), ()> {
        self.bytes.resize_default(new_len)
    }

    /// Forces the length of the buffer to `new_len`.
    ///
    /// This is a low-level operation that maintains none of the normal
    /// invariants of the type. Normally changing the length of a buffer
    /// is done using one of the safe operations instead, such as
    /// [`truncate`], [`resize`], [`extend`], or [`clear`].
    ///
    /// [`truncate`]: Self::truncate
    /// [`resize`]: Self::resize
    /// [`extend`]: core::iter::Extend
    /// [`clear`]: Self::clear
    ///
    /// # Safety
    ///
    /// - `new_len` must be less than or equal to [`capacity()`].
    /// - The elements at `old_len..new_len` must be initialized.
    ///
    /// [`capacity()`]: Self::capacity
    pub unsafe fn set_len(&mut self, new_len: usize) {
        self.bytes.set_len(new_len)
    }

    /// Removes a byte from the buffer and returns it.
    ///
    /// The removed byte is replaced by the last byte of the vector.
    ///
    /// This does not preserve ordering, but is *O*(1).
    ///
    /// # Panics
    ///
    /// Panics if `index` is out of bounds.
    pub fn swap_remove(&mut self, index: usize) -> u8 {
        self.bytes.swap_remove(index)
    }

    /// Removes a byte from the buffer and returns it.
    ///
    /// The removed byte is replaced by the last byte of the vector.
    ///
    /// This does not preserve ordering, but is *O*(1).
    ///
    /// # Safety
    ///
    /// `index` must not be out of bounds.
    pub unsafe fn swap_remove_unchecked(&mut self, index: usize) -> u8 {
        unsafe { self.bytes.swap_remove_unchecked(index) }
    }

    /// Returns true if the buffer is full
    pub fn is_full(&self) -> bool {
        self.bytes.is_full()
    }

    /// Returns true if the buffer is empty
    pub fn is_empty(&self) -> bool {
        self.bytes.is_empty()
    }

    /// Returns `true` if `needle` is a prefix of the buffer.
    ///
    /// Always returns `true` if `needle` is an empty slice.
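    ///
    /// For example:
    ///
    /// ```
    /// # use heapless_bytes::Bytes;
    /// let bytes = Bytes::<8>::try_from(b"abcdef".as_slice()).unwrap();
    /// assert!(bytes.starts_with(b"abc"));
    /// assert!(bytes.starts_with(b""));
    /// assert!(!bytes.starts_with(b"xyz"));
    /// ```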
    pub fn starts_with(&self, needle: &[u8]) -> bool {
        self.bytes.starts_with(needle)
    }

    /// Returns `true` if `needle` is a suffix of the buffer.
    ///
    /// Always returns `true` if `needle` is an empty slice.
    pub fn ends_with(&self, needle: &[u8]) -> bool {
        self.bytes.ends_with(needle)
    }

    /// Inserts a byte at position `index` within the buffer, shifting all
    /// bytes after it to the right.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, value: u8) -> Result<(), ()> {
        self.bytes.insert(index, value).map_err(drop)
    }

    /// Removes and returns the byte at position `index` within the buffer, shifting all
    /// bytes after it to the left.
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> u8 {
        self.bytes.remove(index)
    }

    /// Retains only the bytes specified by the predicate.
    ///
    /// In other words, remove all bytes `b` for which `f(&b)` returns `false`.
    /// This method operates in place, visiting each element exactly once in the
    /// original order, and preserves the order of the retained elements.
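    ///
    /// A short usage sketch:
    ///
    /// ```
    /// # use heapless_bytes::Bytes;
    /// let mut bytes: Bytes<8> = Bytes::from(&[1, 2, 3, 4]);
    /// bytes.retain(|b| *b % 2 == 0);
    /// assert_eq!(&*bytes, &[2, 4]);
    /// ```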
    pub fn retain(&mut self, f: impl FnMut(&u8) -> bool) {
        self.bytes.retain(f)
    }

    /// Retains only the bytes specified by the predicate, passing a mutable reference to it.
    ///
    /// In other words, remove all bytes `b` for which `f(&mut b)` returns `false`.
    /// This method operates in place, visiting each element exactly once in the
    /// original order, and preserves the order of the retained elements.
    pub fn retain_mut(&mut self, f: impl FnMut(&mut u8) -> bool) {
        self.bytes.retain_mut(f)
    }

    /// Resizes the buffer to its full capacity `N`, filling any new slots with `0`.
    pub fn resize_to_capacity(&mut self) {
        self.bytes.resize_default(self.bytes.capacity()).ok();
    }

    /// Fallible conversion into a `Bytes` instance with a different capacity `M`.
    ///
    /// Fails if the current contents do not fit into the new capacity. For an infallible
    /// version when `M` is known to be at least `N`, see [`increase_capacity`](Self::increase_capacity).
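    ///
    /// A short usage sketch:
    ///
    /// ```
    /// # use heapless_bytes::Bytes;
    /// let bytes: Bytes<32> = Bytes::from([1; 32]);
    /// // Growing the capacity always succeeds ...
    /// let grown: Bytes<64> = bytes.resize_capacity().unwrap();
    /// assert_eq!(grown.len(), 32);
    /// // ... but shrinking fails if the contents do not fit.
    /// assert!(bytes.resize_capacity::<16>().is_err());
    /// ```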
    pub fn resize_capacity<const M: usize>(&self) -> Result<Bytes<M>, ()> {
        Bytes::try_from(&**self)
    }

    /// Copy the contents of this `Bytes` instance into a new instance with a higher capacity.
    ///
    /// ```
    /// # use heapless_bytes::Bytes;
    /// let bytes32: Bytes<32> = Bytes::from([0; 32]);
    /// let bytes64: Bytes<64> = bytes32.increase_capacity();
    /// assert_eq!(bytes64.len(), 32);
    /// assert_eq!(bytes64.capacity(), 64);
    /// ```
    ///
    /// Decreasing the capacity causes a compiler error:
    /// ```compile_fail
    /// # use heapless_bytes::Bytes;
    /// let bytes32: Bytes<32> = Bytes::from([0; 32]);
    /// let bytes16: Bytes<16> = bytes32.increase_capacity();
    /// ```
    pub fn increase_capacity<const M: usize>(&self) -> Bytes<M> {
        let () = AssertLessThanEq::<N, M>::ASSERT;
        let mut bytes = Vec::new();
        // `bytes` has length 0 and capacity `M`, `self` has length at most `N`,
        // and `N <= M`, so this can never panic
        bytes.extend_from_slice(self.as_slice()).unwrap();
        Bytes { bytes }
    }
}

/// Construct a `Bytes<N>` instance from an array with `N` elements.
///
/// Currently, the array is copied, but a more efficient implementation could be used in the
/// future.
///
/// ```
/// # use heapless_bytes::Bytes;
/// let bytes: Bytes<3> = Bytes::from([0, 1, 2]);
/// ```
///
/// Length mismatches cause a compiler error:
/// ```compile_fail
/// # use heapless_bytes::Bytes;
/// let bytes: Bytes<3> = Bytes::from([0, 1]);  // does not compile
/// ```
/// ```compile_fail
/// # use heapless_bytes::Bytes;
/// let bytes: Bytes<3> = Bytes::from([0, 1, 2, 3]);  // does not compile
/// ```
impl<const N: usize> From<[u8; N]> for Bytes<N> {
    fn from(bytes: [u8; N]) -> Self {
        Self::from(&bytes)
    }
}

struct AssertLessThanEq<const I: usize, const J: usize>;

impl<const I: usize, const J: usize> AssertLessThanEq<I, J> {
    const ASSERT: () = assert!(I <= J, "Cannot convert infallibly between two arrays when the capacity of the new array is not sufficient");
}

/// Construct a `Bytes<N>` instance by copying from an array with at most `N` elements.
///
/// ```
/// # use heapless_bytes::Bytes;
/// let bytes: Bytes<3> = Bytes::from(&[0, 1, 2]);
/// let shorter_bytes: Bytes<3> = Bytes::from(&[0, 1]);
/// ```
///
/// Overlong input data causes a compiler error:
/// ```compile_fail
/// # use heapless_bytes::Bytes;
/// let bytes: Bytes<3> = Bytes::from(&[0, 1, 2, 3]);  // does not compile
/// ```
impl<const N: usize, const M: usize> From<&[u8; M]> for Bytes<N> {
    fn from(data: &[u8; M]) -> Self {
        let () = AssertLessThanEq::<M, N>::ASSERT;
        let mut bytes = Vec::new();
        // `bytes` has length 0 and capacity `N`, `data` has length `M`,
        // and `M <= N`, so this can never panic
        bytes.extend_from_slice(data).unwrap();
        Bytes { bytes }
    }
}

// impl<N, E, F> TryFrom<F> for Bytes<N>
// where
//     N: ArrayLength<u8>,
//     F: FnOnce(&mut [u8]) -> Result<usize, E>,
// {
//     type Error = E;

//     fn try_from(f: F) -> Result<Self, Self::Error>  {

//         let mut data = Self::new();
//         data.resize_to_capacity();
//         let result = f(&mut data);

//         result.map(|count| {
//             data.resize_default(count).unwrap();
//             data
//         })
//     }
// }

impl<const N: usize> Debug for Bytes<N> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // TODO: There has to be a better way :'-)

        use core::ascii::escape_default;
        f.write_str("b'")?;
        for byte in &self.bytes {
            write!(f, "{}", escape_default(*byte))?;
        }
        f.write_str("'")?;
        Ok(())
    }
}

impl<const N: usize> AsRef<[u8]> for Bytes<N> {
    fn as_ref(&self) -> &[u8] {
        &self.bytes
    }
}

impl<const N: usize> AsMut<[u8]> for Bytes<N> {
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.bytes
    }
}

impl<const N: usize> Deref for Bytes<N> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        &self.bytes
    }
}

impl<const N: usize> DerefMut for Bytes<N> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.bytes
    }
}

impl<Rhs, const N: usize> PartialEq<Rhs> for Bytes<N>
where
    Rhs: ?Sized + AsRef<[u8]>,
{
    fn eq(&self, other: &Rhs) -> bool {
        self.as_ref().eq(other.as_ref())
    }
}

impl<Rhs, const N: usize> PartialOrd<Rhs> for Bytes<N>
where
    Rhs: ?Sized + AsRef<[u8]>,
{
    fn partial_cmp(&self, other: &Rhs) -> Option<Ordering> {
        self.as_ref().partial_cmp(other.as_ref())
    }
}

impl<const N: usize> Hash for Bytes<N> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.bytes.hash(state);
    }
}

#[derive(Clone)]
pub struct IntoIter<const N: usize> {
    inner: <Vec<u8, N> as IntoIterator>::IntoIter,
}

impl<const N: usize> Iterator for IntoIter<N> {
    type Item = u8;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}

impl<const N: usize> IntoIterator for Bytes<N> {
    type Item = u8;
    type IntoIter = IntoIter<N>;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            inner: self.bytes.into_iter(),
        }
    }
}

impl<'a, const N: usize> IntoIterator for &'a Bytes<N> {
    type Item = &'a u8;
    type IntoIter = <&'a [u8] as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.bytes.iter()
    }
}

impl<'a, const N: usize> IntoIterator for &'a mut Bytes<N> {
    type Item = &'a mut u8;
    type IntoIter = <&'a mut [u8] as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.bytes.iter_mut()
    }
}

impl<const N: usize> Serialize for Bytes<N> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_bytes(self)
    }
}

impl<const N: usize> core::fmt::Write for Bytes<N> {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.bytes.write_str(s)
    }
    fn write_char(&mut self, s: char) -> fmt::Result {
        self.bytes.write_char(s)
    }
    fn write_fmt(&mut self, s: core::fmt::Arguments<'_>) -> fmt::Result {
        self.bytes.write_fmt(s)
    }
}

impl<'de, const N: usize> Deserialize<'de> for Bytes<N> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ValueVisitor<const N: usize>;

        impl<'de, const N: usize> Visitor<'de> for ValueVisitor<N> {
            type Value = Bytes<N>;

            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                formatter.write_str("a sequence of bytes")
            }

            fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Bytes::try_from(v).map_err(|()| E::invalid_length(v.len(), &self))
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::SeqAccess<'de>,
            {
                use serde::de::Error;

                let mut this = Bytes::new();
                while let Some(byte) = seq.next_element()? {
                    this.push(byte)
                        .map_err(|()| A::Error::invalid_length(this.len(), &self))?;
                }
                Ok(this)
            }
        }

        deserializer.deserialize_bytes(ValueVisitor)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_test::{assert_tokens, Token};

    #[test]
    fn serde() {
        let mut bytes = Bytes::<0>::new();
        assert!(bytes.push(1).is_err());
        assert_tokens(&bytes, &[Token::Bytes(&[])]);

        let mut bytes = Bytes::<16>::new();
        bytes.push(1).unwrap();
        assert_tokens(&bytes, &[Token::Bytes(&[1])]);
        assert!(bytes.extend_from_slice(&[2; 16]).is_err());
        assert_eq!(&*bytes, &[1]);
        assert!(bytes.extend_from_slice(&[2; 15]).is_ok());
        assert_eq!(&*bytes, &[1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]);
        assert_tokens(
            &bytes,
            &[Token::Bytes(&[
                1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            ])],
        );
    }

    #[test]
    fn display() {
        assert_eq!(
            r"b'\x00abcde\n'",
            format!(
                "{:?}",
                Bytes::<10>::try_from(b"\0abcde\n".as_slice()).unwrap()
            )
        );
    }

    #[test]
    fn from() {
        let _: Bytes<10> = [0; 10].into();
        let _: Bytes<10> = (&[0; 8]).into();
        #[cfg(feature = "heapless-0.8")]
        let _: Bytes<10> = Vec::<u8, 10>::new().into();
        #[cfg(feature = "heapless-0.8")]
        let _: Bytes<10> = Vec::<u8, 9>::new().into();
    }
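
    // Additional illustrative checks, sketched against the APIs defined above.

    // The `core::fmt::Write` implementation appends the formatted text to the buffer.
    #[test]
    fn write() {
        use core::fmt::Write as _;

        let mut bytes = Bytes::<16>::new();
        write!(bytes, "{}-{}", 4, 2).unwrap();
        assert_eq!(&*bytes, b"4-2");
    }

    // The `Deserialize` impl also accepts a sequence of bytes via `visit_seq`;
    // this assumes `serde_test` dispatches `Token::Seq` input to that visitor method.
    #[test]
    fn deserialize_from_seq() {
        use serde_test::assert_de_tokens;

        let mut bytes = Bytes::<4>::new();
        bytes.extend_from_slice(&[1, 2, 3]).unwrap();
        assert_de_tokens(
            &bytes,
            &[
                Token::Seq { len: Some(3) },
                Token::U8(1),
                Token::U8(2),
                Token::U8(3),
                Token::SeqEnd,
            ],
        );
    }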
}