1use std::alloc::Layout;
19use std::fmt::Debug;
20use std::ptr::NonNull;
21use std::sync::Arc;
22
23use crate::alloc::{Allocation, Deallocation};
24use crate::util::bit_chunk_iterator::{BitChunks, UnalignedBitChunk};
25use crate::BufferBuilder;
26use crate::{bit_util, bytes::Bytes, native::ArrowNativeType};
27
28use super::ops::bitwise_unary_op_helper;
29use super::{MutableBuffer, ScalarBuffer};
30
/// An immutable, cheaply-cloneable byte buffer.
///
/// Cloning is `O(1)`: the backing [`Bytes`] allocation is shared via an
/// [`Arc`], and `ptr`/`length` describe the window of that allocation this
/// particular `Buffer` exposes (slices share storage with their parent).
#[derive(Clone, Debug)]
pub struct Buffer {
    /// Reference-counted backing storage, shared between this buffer and
    /// any slices derived from it.
    data: Arc<Bytes>,

    /// Pointer to the first visible byte. Invariant: always points inside
    /// (or one-past-the-end of) the allocation owned by `data`.
    ptr: *const u8,

    /// Number of visible bytes starting at `ptr`.
    length: usize,
}
84
85impl PartialEq for Buffer {
86 fn eq(&self, other: &Self) -> bool {
87 self.as_slice().eq(other.as_slice())
88 }
89}
90
// `Eq` is sound because `PartialEq` compares byte slices, and `&[u8]`
// equality is a total equivalence relation.
impl Eq for Buffer {}
92
// SAFETY: `Buffer` only adds a raw pointer into the allocation co-owned via
// `data: Arc<Bytes>`; it carries no thread-affine state of its own, so it is
// Send/Sync exactly when the backing `Bytes` is.
unsafe impl Send for Buffer where Bytes: Send {}
unsafe impl Sync for Buffer where Bytes: Sync {}
95
impl Buffer {
    /// Creates a buffer from [`Bytes`].
    #[deprecated(since = "54.1.0", note = "Use Buffer::from instead")]
    pub fn from_bytes(bytes: Bytes) -> Self {
        Self::from(bytes)
    }

    /// Returns the offset, in bytes, of `self.ptr` from the start of the
    /// underlying allocation (non-zero for sliced buffers).
    pub fn ptr_offset(&self) -> usize {
        // SAFETY: `ptr` is always derived from `data.ptr()` by adding a
        // non-negative in-bounds offset (see `advance`/`slice_with_length`),
        // so `offset_from` is within the same allocation and non-negative.
        unsafe { self.ptr.offset_from(self.data.ptr().as_ptr()) as usize }
    }

    /// Returns the pointer to the start of the underlying allocation,
    /// ignoring any slice offset (compare with [`Self::as_ptr`]).
    pub fn data_ptr(&self) -> NonNull<u8> {
        self.data.ptr()
    }

    /// Creates a buffer from a `Vec` of native values, taking ownership of
    /// the vector's allocation without copying.
    #[inline]
    pub fn from_vec<T: ArrowNativeType>(vec: Vec<T>) -> Self {
        MutableBuffer::from(vec).into()
    }

    /// Initializes a new buffer by copying the bytes of the given slice of
    /// native values into a fresh allocation.
    pub fn from_slice_ref<U: ArrowNativeType, T: AsRef<[U]>>(items: T) -> Self {
        let slice = items.as_ref();
        // Total byte size of the slice, not the element count.
        let capacity = std::mem::size_of_val(slice);
        let mut buffer = MutableBuffer::with_capacity(capacity);
        buffer.extend_from_slice(slice);
        buffer.into()
    }

    /// Creates a buffer backed by an externally-owned memory region; the
    /// region is released when `owner`'s last reference is dropped.
    ///
    /// # Safety
    /// The caller must guarantee that `ptr` is valid for reads of `len`
    /// bytes and stays valid (and unmutated) for as long as `owner` lives.
    pub unsafe fn from_custom_allocation(
        ptr: NonNull<u8>,
        len: usize,
        owner: Arc<dyn Allocation>,
    ) -> Self {
        Buffer::build_with_arguments(ptr, len, Deallocation::Custom(owner, len))
    }

    /// Shared constructor: wraps `ptr`/`len` in [`Bytes`] with the given
    /// deallocation strategy.
    ///
    /// # Safety
    /// Same contract as [`Bytes::new`]: `ptr` must be valid for `len` bytes
    /// under the chosen `deallocation`.
    unsafe fn build_with_arguments(
        ptr: NonNull<u8>,
        len: usize,
        deallocation: Deallocation,
    ) -> Self {
        let bytes = Bytes::new(ptr, len, deallocation);
        // Cache the data pointer before `bytes` moves into the Arc.
        let ptr = bytes.as_ptr();
        Buffer {
            ptr,
            data: Arc::new(bytes),
            length: len,
        }
    }

    /// Returns the number of visible bytes in the buffer.
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }

    /// Returns the capacity of the underlying allocation, which may exceed
    /// `len()` (e.g. for sliced buffers).
    #[inline]
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Attempts to shrink the underlying allocation to fit the visible
    /// bytes. Best-effort: does nothing if the allocation is shared or the
    /// reallocation fails.
    pub fn shrink_to_fit(&mut self) {
        let offset = self.ptr_offset();
        let is_empty = self.is_empty();
        // Bytes before the slice offset must be kept so `ptr` stays valid.
        let desired_capacity = if is_empty {
            0
        } else {
            offset + self.len()
        };
        if desired_capacity < self.capacity() {
            // Only possible when no other Buffer shares this allocation.
            if let Some(bytes) = Arc::get_mut(&mut self.data) {
                if bytes.try_realloc(desired_capacity).is_ok() {
                    // Realloc may have moved the allocation; re-derive `ptr`.
                    self.ptr = if is_empty {
                        bytes.as_ptr()
                    } else {
                        // SAFETY: `offset + len <= desired_capacity`, so the
                        // offset is still in bounds of the new allocation.
                        unsafe { bytes.as_ptr().add(offset) }
                    }
                } else {
                    // Reallocation failed: keep the larger allocation.
                }
            }
        }
    }

    /// Returns whether the buffer has zero visible bytes.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }

    /// Returns the visible bytes of the buffer as a slice.
    pub fn as_slice(&self) -> &[u8] {
        // SAFETY: `ptr` points at `length` initialized bytes inside the
        // allocation kept alive by `self.data`.
        unsafe { std::slice::from_raw_parts(self.ptr, self.length) }
    }

    /// Returns how the underlying memory is released (standard vs custom).
    pub(crate) fn deallocation(&self) -> &Deallocation {
        self.data.deallocation()
    }

    /// Returns a zero-copy slice of this buffer starting at `offset` bytes.
    ///
    /// Panics if `offset` exceeds `len()`.
    pub fn slice(&self, offset: usize) -> Self {
        let mut s = self.clone();
        s.advance(offset);
        s
    }

    /// Advances the buffer's start by `offset` bytes in place, shrinking the
    /// visible length accordingly.
    ///
    /// Panics if `offset` exceeds `len()`.
    #[inline]
    pub fn advance(&mut self, offset: usize) {
        assert!(
            offset <= self.length,
            "the offset of the new Buffer cannot exceed the existing length: offset={} length={}",
            offset,
            self.length
        );
        self.length -= offset;
        // SAFETY: `offset <= length` (asserted above), so the result is at
        // most one-past-the-end of the allocation, which is allowed.
        self.ptr = unsafe { self.ptr.add(offset) };
    }

    /// Returns a zero-copy slice of `length` bytes starting at `offset`.
    ///
    /// Panics if `offset + length` exceeds `len()` (checked with saturating
    /// addition so the check itself cannot overflow).
    pub fn slice_with_length(&self, offset: usize, length: usize) -> Self {
        assert!(
            offset.saturating_add(length) <= self.length,
            "the offset of the new Buffer cannot exceed the existing length: slice offset={offset} length={length} selflen={}",
            self.length
        );
        // SAFETY: in bounds per the assertion above.
        let ptr = unsafe { self.ptr.add(offset) };
        Self {
            data: self.data.clone(),
            ptr,
            length,
        }
    }

    /// Returns a raw pointer to the buffer's first visible byte.
    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.ptr
    }

    /// Views the buffer as a slice of native values.
    ///
    /// Panics if the buffer is not properly aligned for `T` or its length is
    /// not a multiple of `size_of::<T>()`.
    pub fn typed_data<T: ArrowNativeType>(&self) -> &[T] {
        // SAFETY: `ArrowNativeType` is plain-old-data, so any bit pattern is
        // a valid `T`; `align_to` itself guarantees correct partitioning.
        let (prefix, offsets, suffix) = unsafe { self.as_slice().align_to::<T>() };
        // A non-empty prefix/suffix means misalignment or a trailing partial
        // element — reject rather than silently truncate.
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

    /// Slices this buffer of *bits* starting at `offset` for `len` bits.
    /// Byte-aligned offsets are zero-copy; otherwise the bits are copied
    /// into a new buffer.
    pub fn bit_slice(&self, offset: usize, len: usize) -> Self {
        if offset % 8 == 0 {
            return self.slice_with_length(offset / 8, bit_util::ceil(len, 8));
        }

        // Unaligned: materialize via an identity bitwise op.
        bitwise_unary_op_helper(self, offset, len, |a| a)
    }

    /// Returns a [`BitChunks`] iterator over the buffer's bits, which
    /// yields `u64` chunks plus a remainder.
    pub fn bit_chunks(&self, offset: usize, len: usize) -> BitChunks {
        BitChunks::new(self.as_slice(), offset, len)
    }

    /// Counts the set (1) bits in the range `[offset, offset + len)`,
    /// measured in bits from the start of the buffer.
    pub fn count_set_bits_offset(&self, offset: usize, len: usize) -> usize {
        UnalignedBitChunk::new(self.as_slice(), offset, len).count_ones()
    }

    /// Converts this buffer back into a [`MutableBuffer`] without copying,
    /// returning `Err(self)` if the allocation is shared or not mutable
    /// (e.g. custom/external allocations).
    pub fn into_mutable(self) -> Result<MutableBuffer, Self> {
        let ptr = self.ptr;
        let length = self.length;
        // `try_unwrap` succeeds only when this is the sole reference.
        Arc::try_unwrap(self.data)
            .and_then(|bytes| {
                // Sliced buffers (ptr offset != 0) cannot round-trip.
                assert_eq!(ptr, bytes.ptr().as_ptr());
                MutableBuffer::from_bytes(bytes).map_err(Arc::new)
            })
            .map_err(|bytes| Buffer {
                data: bytes,
                ptr,
                length,
            })
    }

    /// Converts this buffer back into a `Vec<T>` without copying.
    ///
    /// Returns `Err(self)` unless the buffer exclusively owns a standard
    /// (Rust-allocated) region whose layout matches `Vec<T>` exactly and
    /// whose start has not been advanced by slicing.
    pub fn into_vec<T: ArrowNativeType>(self) -> Result<Vec<T>, Self> {
        // Only standard allocations carry the Layout needed by Vec.
        let layout = match self.data.deallocation() {
            Deallocation::Standard(l) => l,
            _ => return Err(self), };

        // A sliced buffer's ptr no longer matches the allocation start.
        if self.ptr != self.data.as_ptr() {
            return Err(self); }

        // The allocation's layout must be exactly what Vec::<T> would have
        // produced, otherwise Vec::from_raw_parts would be UB.
        let v_capacity = layout.size() / std::mem::size_of::<T>();
        match Layout::array::<T>(v_capacity) {
            Ok(expected) if layout == &expected => {}
            _ => return Err(self), }

        let length = self.length;
        let ptr = self.ptr;
        let v_len = self.length / std::mem::size_of::<T>();

        Arc::try_unwrap(self.data)
            .map(|bytes| unsafe {
                // SAFETY: sole owner of a standard allocation whose layout
                // matches Layout::array::<T>(v_capacity); forget `bytes` so
                // ownership transfers to the Vec without a double free.
                let ptr = bytes.ptr().as_ptr() as _;
                std::mem::forget(bytes);
                Vec::from_raw_parts(ptr, v_len, v_capacity)
            })
            .map_err(|bytes| Buffer {
                data: bytes,
                ptr,
                length,
            })
    }

    /// Returns true if both buffers view the same byte range (identical
    /// pointer and length); a cheap identity check, not a content check.
    #[inline]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        self.ptr == other.ptr && self.length == other.length
    }
}
418
419impl From<&[u8]> for Buffer {
428 fn from(p: &[u8]) -> Self {
429 Self::from_slice_ref(p)
430 }
431}
432
433impl<const N: usize> From<[u8; N]> for Buffer {
434 fn from(p: [u8; N]) -> Self {
435 Self::from_slice_ref(p)
436 }
437}
438
439impl<const N: usize> From<&[u8; N]> for Buffer {
440 fn from(p: &[u8; N]) -> Self {
441 Self::from_slice_ref(p)
442 }
443}
444
445impl<T: ArrowNativeType> From<Vec<T>> for Buffer {
446 fn from(value: Vec<T>) -> Self {
447 Self::from_vec(value)
448 }
449}
450
impl<T: ArrowNativeType> From<ScalarBuffer<T>> for Buffer {
    /// Unwraps a typed [`ScalarBuffer`] back into its untyped backing
    /// buffer, without copying.
    fn from(value: ScalarBuffer<T>) -> Self {
        value.into_inner()
    }
}
456
457impl From<Bytes> for Buffer {
459 #[inline]
460 fn from(bytes: Bytes) -> Self {
461 let length = bytes.len();
462 let ptr = bytes.as_ptr();
463 Self {
464 data: Arc::new(bytes),
465 ptr,
466 length,
467 }
468 }
469}
470
471impl From<bytes::Bytes> for Buffer {
473 fn from(bytes: bytes::Bytes) -> Self {
474 let bytes: Bytes = bytes.into();
475 Self::from(bytes)
476 }
477}
478
479impl FromIterator<bool> for Buffer {
481 fn from_iter<I>(iter: I) -> Self
482 where
483 I: IntoIterator<Item = bool>,
484 {
485 MutableBuffer::from_iter(iter).into()
486 }
487}
488
489impl std::ops::Deref for Buffer {
490 type Target = [u8];
491
492 fn deref(&self) -> &[u8] {
493 unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len()) }
494 }
495}
496
497impl From<MutableBuffer> for Buffer {
498 #[inline]
499 fn from(buffer: MutableBuffer) -> Self {
500 buffer.into_buffer()
501 }
502}
503
impl<T: ArrowNativeType> From<BufferBuilder<T>> for Buffer {
    /// Finishes the builder, consuming it and yielding its accumulated
    /// contents as an immutable buffer.
    fn from(mut value: BufferBuilder<T>) -> Self {
        value.finish()
    }
}
509
impl Buffer {
    /// Creates a buffer from an iterator of native values, trusting the
    /// iterator's reported length to pre-size the allocation exactly.
    ///
    /// # Safety
    /// The caller must guarantee the iterator yields exactly the number of
    /// items its `size_hint` upper bound reports (a "trusted length"
    /// iterator); otherwise uninitialized memory may be exposed.
    #[inline]
    pub unsafe fn from_trusted_len_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        iterator: I,
    ) -> Self {
        MutableBuffer::from_trusted_len_iter(iterator).into()
    }

    /// Fallible variant of [`Self::from_trusted_len_iter`]: short-circuits
    /// with the first `Err` produced by the iterator.
    ///
    /// # Safety
    /// Same contract as [`Self::from_trusted_len_iter`]: the iterator's
    /// reported length must be exact.
    #[inline]
    pub unsafe fn try_from_trusted_len_iter<
        E,
        T: ArrowNativeType,
        I: Iterator<Item = Result<T, E>>,
    >(
        iterator: I,
    ) -> Result<Self, E> {
        Ok(MutableBuffer::try_from_trusted_len_iter(iterator)?.into())
    }
}
554
555impl<T: ArrowNativeType> FromIterator<T> for Buffer {
556 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
557 let vec = Vec::from_iter(iter);
558 Buffer::from_vec(vec)
559 }
560}
561
#[cfg(test)]
mod tests {
    use crate::i256;
    use std::panic::{RefUnwindSafe, UnwindSafe};
    use std::thread;

    use super::*;

    #[test]
    fn test_buffer_data_equality() {
        let buf1 = Buffer::from(&[0, 1, 2, 3, 4]);
        let buf2 = Buffer::from(&[0, 1, 2, 3, 4]);
        assert_eq!(buf1, buf2);

        // Slicing changes the visible contents, so equality must fail.
        let buf3 = buf1.slice(2);
        assert_ne!(buf1, buf3);
        let buf4 = buf2.slice_with_length(2, 3);
        assert_eq!(buf3, buf4);

        // Equality compares contents, not capacity.
        let mut buf2 = MutableBuffer::new(65);
        buf2.extend_from_slice(&[0u8, 1, 2, 3, 4]);

        let buf2 = buf2.into();
        assert_eq!(buf1, buf2);

        // Unequal contents of the same length.
        let buf2 = Buffer::from(&[0, 0, 2, 3, 4]);
        assert_ne!(buf1, buf2);

        // Unequal lengths.
        let buf2 = Buffer::from(&[0, 1, 2, 3]);
        assert_ne!(buf1, buf2);
    }

    #[test]
    fn test_from_raw_parts() {
        let buf = Buffer::from(&[0, 1, 2, 3, 4]);
        assert_eq!(5, buf.len());
        assert!(!buf.as_ptr().is_null());
        assert_eq!([0, 1, 2, 3, 4], buf.as_slice());
    }

    #[test]
    fn test_from_vec() {
        let buf = Buffer::from(&[0, 1, 2, 3, 4]);
        assert_eq!(5, buf.len());
        assert!(!buf.as_ptr().is_null());
        assert_eq!([0, 1, 2, 3, 4], buf.as_slice());
    }

    #[test]
    fn test_copy() {
        let buf = Buffer::from(&[0, 1, 2, 3, 4]);
        let buf2 = buf;
        assert_eq!(5, buf2.len());
        assert_eq!(64, buf2.capacity());
        assert!(!buf2.as_ptr().is_null());
        assert_eq!([0, 1, 2, 3, 4], buf2.as_slice());
    }

    #[test]
    fn test_slice() {
        let buf = Buffer::from(&[2, 4, 6, 8, 10]);
        let buf2 = buf.slice(2);

        assert_eq!([6, 8, 10], buf2.as_slice());
        assert_eq!(3, buf2.len());
        // Slices share the parent allocation — only the pointer advances.
        assert_eq!(unsafe { buf.as_ptr().offset(2) }, buf2.as_ptr());

        let buf3 = buf2.slice_with_length(1, 2);
        assert_eq!([8, 10], buf3.as_slice());
        assert_eq!(2, buf3.len());
        assert_eq!(unsafe { buf.as_ptr().offset(3) }, buf3.as_ptr());

        // Slicing at exactly `len` yields a valid empty buffer.
        let buf4 = buf.slice(5);
        let empty_slice: [u8; 0] = [];
        assert_eq!(empty_slice, buf4.as_slice());
        assert_eq!(0, buf4.len());
        assert!(buf4.is_empty());
        assert_eq!(buf2.slice_with_length(2, 1).as_slice(), &[10]);
    }

    #[test]
    fn test_shrink_to_fit() {
        let original = Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7]);
        assert_eq!(original.as_slice(), &[0, 1, 2, 3, 4, 5, 6, 7]);
        assert_eq!(original.capacity(), 64);

        // A slice keeps the parent allocation (and its capacity) alive.
        let slice = original.slice_with_length(2, 3);
        drop(original); assert_eq!(slice.as_slice(), &[2, 3, 4]);
        assert_eq!(slice.capacity(), 64);

        // Shrinking keeps the leading offset bytes so `ptr` stays valid:
        // capacity becomes offset (2) + len (3) = 5.
        let mut shrunk = slice;
        shrunk.shrink_to_fit();
        assert_eq!(shrunk.as_slice(), &[2, 3, 4]);
        assert_eq!(shrunk.capacity(), 5); let empty_slice = shrunk.slice_with_length(1, 0);
        drop(shrunk); assert_eq!(empty_slice.as_slice(), &[]);
        assert_eq!(empty_slice.capacity(), 5);

        // An empty buffer can shrink all the way to zero capacity.
        let mut shrunk_empty = empty_slice;
        shrunk_empty.shrink_to_fit();
        assert_eq!(shrunk_empty.as_slice(), &[]);
        assert_eq!(shrunk_empty.capacity(), 0);
    }

    #[test]
    #[should_panic(expected = "the offset of the new Buffer cannot exceed the existing length")]
    fn test_slice_offset_out_of_bound() {
        let buf = Buffer::from(&[2, 4, 6, 8, 10]);
        buf.slice(6);
    }

    #[test]
    fn test_access_concurrently() {
        let buffer = Buffer::from([1, 2, 3, 4, 5]);
        let buffer2 = buffer.clone();
        assert_eq!([1, 2, 3, 4, 5], buffer.as_slice());

        // Buffer is Send: move it into another thread and back.
        let buffer_copy = thread::spawn(move || {
            buffer
        })
        .join();

        assert!(buffer_copy.is_ok());
        assert_eq!(buffer2, buffer_copy.ok().unwrap());
    }

    macro_rules! check_as_typed_data {
        ($input: expr, $native_t: ty) => {{
            let buffer = Buffer::from_slice_ref($input);
            let slice: &[$native_t] = buffer.typed_data::<$native_t>();
            assert_eq!($input, slice);
        }};
    }

    #[test]
    #[allow(clippy::float_cmp)]
    fn test_as_typed_data() {
        check_as_typed_data!(&[1i8, 3i8, 6i8], i8);
        check_as_typed_data!(&[1u8, 3u8, 6u8], u8);
        check_as_typed_data!(&[1i16, 3i16, 6i16], i16);
        check_as_typed_data!(&[1i32, 3i32, 6i32], i32);
        check_as_typed_data!(&[1i64, 3i64, 6i64], i64);
        check_as_typed_data!(&[1u16, 3u16, 6u16], u16);
        check_as_typed_data!(&[1u32, 3u32, 6u32], u32);
        check_as_typed_data!(&[1u64, 3u64, 6u64], u64);
        check_as_typed_data!(&[1f32, 3f32, 6f32], f32);
        check_as_typed_data!(&[1f64, 3f64, 6f64], f64);
    }

    #[test]
    fn test_count_bits() {
        assert_eq!(0, Buffer::from(&[0b00000000]).count_set_bits_offset(0, 8));
        assert_eq!(8, Buffer::from(&[0b11111111]).count_set_bits_offset(0, 8));
        assert_eq!(3, Buffer::from(&[0b00001101]).count_set_bits_offset(0, 8));
        assert_eq!(
            6,
            Buffer::from(&[0b01001001, 0b01010010]).count_set_bits_offset(0, 16)
        );
        assert_eq!(
            16,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(0, 16)
        );
    }

    #[test]
    fn test_count_bits_slice() {
        assert_eq!(
            0,
            Buffer::from(&[0b11111111, 0b00000000])
                .slice(1)
                .count_set_bits_offset(0, 8)
        );
        assert_eq!(
            8,
            Buffer::from(&[0b11111111, 0b11111111])
                .slice_with_length(1, 1)
                .count_set_bits_offset(0, 8)
        );
        assert_eq!(
            3,
            Buffer::from(&[0b11111111, 0b11111111, 0b00001101])
                .slice(2)
                .count_set_bits_offset(0, 8)
        );
        assert_eq!(
            6,
            Buffer::from(&[0b11111111, 0b01001001, 0b01010010])
                .slice_with_length(1, 2)
                .count_set_bits_offset(0, 16)
        );
        assert_eq!(
            16,
            Buffer::from(&[0b11111111, 0b11111111, 0b11111111, 0b11111111])
                .slice(2)
                .count_set_bits_offset(0, 16)
        );
    }

    #[test]
    fn test_count_bits_offset_slice() {
        assert_eq!(8, Buffer::from(&[0b11111111]).count_set_bits_offset(0, 8));
        assert_eq!(3, Buffer::from(&[0b11111111]).count_set_bits_offset(0, 3));
        assert_eq!(5, Buffer::from(&[0b11111111]).count_set_bits_offset(3, 5));
        assert_eq!(1, Buffer::from(&[0b11111111]).count_set_bits_offset(3, 1));
        assert_eq!(0, Buffer::from(&[0b11111111]).count_set_bits_offset(8, 0));
        assert_eq!(2, Buffer::from(&[0b01010101]).count_set_bits_offset(0, 3));
        assert_eq!(
            16,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(0, 16)
        );
        assert_eq!(
            10,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(0, 10)
        );
        assert_eq!(
            10,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(3, 10)
        );
        assert_eq!(
            8,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(8, 8)
        );
        assert_eq!(
            5,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(11, 5)
        );
        assert_eq!(
            0,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(16, 0)
        );
        assert_eq!(
            2,
            Buffer::from(&[0b01101101, 0b10101010]).count_set_bits_offset(7, 5)
        );
        assert_eq!(
            4,
            Buffer::from(&[0b01101101, 0b10101010]).count_set_bits_offset(7, 9)
        );
    }

    #[test]
    fn test_unwind_safe() {
        fn assert_unwind_safe<T: RefUnwindSafe + UnwindSafe>() {}
        assert_unwind_safe::<Buffer>()
    }

    #[test]
    fn test_from_foreign_vec() {
        // The Vec itself acts as the custom Allocation owner.
        let mut vector = vec![1_i32, 2, 3, 4, 5];
        let buffer = unsafe {
            Buffer::from_custom_allocation(
                NonNull::new_unchecked(vector.as_mut_ptr() as *mut u8),
                vector.len() * std::mem::size_of::<i32>(),
                Arc::new(vector),
            )
        };

        let slice = buffer.typed_data::<i32>();
        assert_eq!(slice, &[1, 2, 3, 4, 5]);

        let buffer = buffer.slice(std::mem::size_of::<i32>());

        let slice = buffer.typed_data::<i32>();
        assert_eq!(slice, &[2, 3, 4, 5]);
    }

    #[test]
    #[should_panic(expected = "the offset of the new Buffer cannot exceed the existing length")]
    fn slice_overflow() {
        // Regression: offset + length must not overflow the bounds check.
        let buffer = Buffer::from(MutableBuffer::from_len_zeroed(12));
        buffer.slice_with_length(2, usize::MAX);
    }

    #[test]
    fn test_vec_interop() {
        // Empty vec roundtrip.
        let a: Vec<i128> = Vec::new();
        let b = Buffer::from_vec(a);
        b.into_vec::<i128>().unwrap();

        // Capacity survives the roundtrip even with zero length.
        let a: Vec<i128> = Vec::with_capacity(20);
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<i128>().unwrap();
        assert_eq!(back.len(), 0);
        assert_eq!(back.capacity(), 20);

        let mut a: Vec<i128> = Vec::with_capacity(3);
        a.extend_from_slice(&[1, 2, 3]);
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<i128>().unwrap();
        assert_eq!(back.len(), 3);
        assert_eq!(back.capacity(), 3);

        let mut a: Vec<i128> = Vec::with_capacity(20);
        a.extend_from_slice(&[1, 4, 7, 8, 9, 3, 6]);
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<i128>().unwrap();
        assert_eq!(back.len(), 7);
        assert_eq!(back.capacity(), 20);

        // Layout mismatch: a Vec<i128> allocation cannot become Vec<i32/i8>.
        let a: Vec<i128> = Vec::new();
        let b = Buffer::from_vec(a);
        let b = b.into_vec::<i32>().unwrap_err();
        b.into_vec::<i8>().unwrap_err();

        // Same size and alignment is fine (i64 -> u64).
        let a: Vec<i64> = vec![1, 2, 3, 4];
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<u64>().unwrap();
        assert_eq!(back.len(), 4);
        assert_eq!(back.capacity(), 4);

        // i128 pairs reinterpret cleanly as i256.
        let mut b: Vec<i128> = Vec::with_capacity(4);
        b.extend_from_slice(&[1, 2, 3, 4]);
        let b = Buffer::from_vec(b);
        let back = b.into_vec::<i256>().unwrap();
        assert_eq!(back.len(), 2);
        assert_eq!(back.capacity(), 2);

        // Odd element counts/capacities don't divide into i256.
        let b: Vec<i128> = vec![1, 2, 3];
        let b = Buffer::from_vec(b);
        b.into_vec::<i256>().unwrap_err();

        let mut b: Vec<i128> = Vec::with_capacity(5);
        b.extend_from_slice(&[1, 2, 3, 4]);
        let b = Buffer::from_vec(b);
        b.into_vec::<i256>().unwrap_err();

        let mut b: Vec<i128> = Vec::with_capacity(4);
        b.extend_from_slice(&[1, 2, 3]);
        let b = Buffer::from_vec(b);
        let back = b.into_vec::<i256>().unwrap();
        assert_eq!(back.len(), 1);
        assert_eq!(back.capacity(), 2);

        // A MutableBuffer-backed allocation has no Vec-compatible layout.
        let b = Buffer::from(MutableBuffer::new(10));
        let b = b.into_vec::<u8>().unwrap_err();
        b.into_vec::<u64>().unwrap_err();

        let mut a: Vec<i128> = Vec::with_capacity(20);
        a.extend_from_slice(&[1, 4, 7, 8, 9, 3, 6]);
        let b = Buffer::from_vec(a);
        let slice = b.slice_with_length(0, 64);

        // Fails while `b` still shares the allocation...
        let slice = slice.into_vec::<i128>().unwrap_err();
        drop(b);

        // ...succeeds once the slice is the sole owner.
        let back = slice.into_vec::<i128>().unwrap();
        assert_eq!(&back, &[1, 4, 7, 8]);
        assert_eq!(back.capacity(), 20);

        // Truncating to a whole number of elements (34 bytes -> 2 i128s).
        let mut a: Vec<i128> = Vec::with_capacity(8);
        a.extend_from_slice(&[1, 4, 7, 3]);

        let b = Buffer::from_vec(a);
        let slice = b.slice_with_length(0, 34);
        drop(b);

        let back = slice.into_vec::<i128>().unwrap();
        assert_eq!(&back, &[1, 4]);
        assert_eq!(back.capacity(), 8);

        // A buffer whose start was advanced cannot round-trip.
        let a: Vec<u32> = vec![1, 3, 4, 6];
        let b = Buffer::from_vec(a).slice(2);
        b.into_vec::<u32>().unwrap_err();

        let b = MutableBuffer::new(16).into_buffer();
        let b = b.into_vec::<u8>().unwrap_err(); let b = b.into_vec::<u32>().unwrap_err(); b.into_mutable().unwrap();

        // Full roundtrip: Vec -> Buffer -> MutableBuffer -> Buffer -> Vec.
        let b = Buffer::from_vec(vec![1_u32, 3, 5]);
        let b = b.into_mutable().unwrap();
        let b = Buffer::from(b);
        let b = b.into_vec::<u32>().unwrap();
        assert_eq!(b, &[1, 3, 5]);
    }

    #[test]
    #[should_panic(expected = "capacity overflow")]
    fn test_from_iter_overflow() {
        let iter_len = usize::MAX / std::mem::size_of::<u64>() + 1;
        let _ = Buffer::from_iter(std::iter::repeat(0_u64).take(iter_len));
    }

    #[test]
    fn bit_slice_length_preserved() {
        let buf = Buffer::from_iter(std::iter::repeat(true).take(64));

        let assert_preserved = |offset: usize, len: usize| {
            let new_buf = buf.bit_slice(offset, len);
            assert_eq!(new_buf.len(), bit_util::ceil(len, 8));

            // Byte-aligned offsets are zero-copy (ptr offset advances);
            // unaligned offsets copy into a fresh buffer (offset 0).
            if offset % 8 == 0 {
                assert_eq!(new_buf.ptr_offset(), offset / 8);
            } else {
                assert_eq!(new_buf.ptr_offset(), 0);
            }
        };

        for o in 0..=64 {
            for l in (o..=64).map(|l| l - o) {
                assert_preserved(o, l);
            }
        }
    }
}