use crate::bit_util::ceil;

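/// Sets `len` bits in `write_data`, starting at bit offset `offset_write`, to the values
/// of the corresponding bits in `data` starting at bit offset `offset_read`.
///
/// Returns the number of `0` bits copied from `data[offset_read..offset_read + len]`
/// (i.e. the null count of the copied range).
///
/// `offset_write`, `offset_read`, and `len` are all expressed in bits.
///
/// # Example
///
/// ```text
/// // Illustrative only (LSB-first bit order):
/// // copy 4 bits of `src` starting at bit 1 into `dst` starting at bit 2.
/// let mut dst = [0u8; 1];
/// let src = [0b0001_1110u8];
/// let nulls = set_bits(&mut dst, &src, 2, 1, 4);
/// assert_eq!(dst, [0b0011_1100]);
/// assert_eq!(nulls, 0);
/// ```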
pub fn set_bits(
    write_data: &mut [u8],
    data: &[u8],
    offset_write: usize,
    offset_read: usize,
    len: usize,
) -> usize {
    assert!(offset_write + len <= write_data.len() * 8);
    assert!(offset_read + len <= data.len() * 8);
    let mut null_count = 0;
    let mut acc = 0;
    while len > acc {
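        // SAFETY: the asserts above guarantee that `offset_write + acc`,
        // `offset_read + acc`, and `len - acc` stay within the bit bounds of the slices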
        let (n, len_set) = unsafe {
            set_upto_64bits(
                write_data,
                data,
                offset_write + acc,
                offset_read + acc,
                len - acc,
            )
        };
        null_count += n;
        acc += len_set;
    }

    null_count
}

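/// Similar to `set_bits`, but sets at most 64 bits per call; the number of bits actually
/// copied depends on the read/write bit offsets and is returned to the caller.
///
/// Returns `(null_count, bits_set)`: the number of `0` bits copied and the number of
/// bits consumed from `data`.
///
/// # Safety
/// The caller must ensure that `offset_write + len` and `offset_read + len` do not
/// exceed the bit lengths of `write_data` and `data`, respectively.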
#[inline]
unsafe fn set_upto_64bits(
    write_data: &mut [u8],
    data: &[u8],
    offset_write: usize,
    offset_read: usize,
    len: usize,
) -> (usize, usize) {
    let read_byte = offset_read / 8;
    let read_shift = offset_read % 8;
    let write_byte = offset_write / 8;
    let write_shift = offset_write % 8;

    if len >= 64 {
        let chunk = unsafe { (data.as_ptr().add(read_byte) as *const u64).read_unaligned() };
        if read_shift == 0 {
            if write_shift == 0 {
                let len = 64;
                let null_count = chunk.count_zeros() as usize;
                unsafe { write_u64_bytes(write_data, write_byte, chunk) };
                (null_count, len)
            } else {
                let len = 64 - write_shift;
                let chunk = chunk << write_shift;
                let null_count = len - chunk.count_ones() as usize;
                unsafe { or_write_u64_bytes(write_data, write_byte, chunk) };
                (null_count, len)
            }
        } else if write_shift == 0 {
            // Copy 56 bits (a whole number of bytes) so that the next iteration also
            // starts with write_shift == 0; the remaining bits are handled there.
            let len = 64 - 8;
            let chunk = (chunk >> read_shift) & 0x00FFFFFFFFFFFFFF; // mask to the low 56 bits
            let null_count = len - chunk.count_ones() as usize;
            unsafe { write_u64_bytes(write_data, write_byte, chunk) };
            (null_count, len)
        } else {
            let len = 64 - std::cmp::max(read_shift, write_shift);
            let chunk = (chunk >> read_shift) << write_shift;
            let null_count = len - chunk.count_ones() as usize;
            unsafe { or_write_u64_bytes(write_data, write_byte, chunk) };
            (null_count, len)
        }
    } else if len == 1 {
        let byte_chunk = (unsafe { data.get_unchecked(read_byte) } >> read_shift) & 1;
        unsafe { *write_data.get_unchecked_mut(write_byte) |= byte_chunk << write_shift };
        ((byte_chunk ^ 1) as usize, 1)
    } else {
        let len = std::cmp::min(len, 64 - std::cmp::max(read_shift, write_shift));
        let bytes = ceil(len + read_shift, 8);
        let chunk = unsafe { read_bytes_to_u64(data, read_byte, bytes) };
        let mask = u64::MAX >> (64 - len);
        let chunk = (chunk >> read_shift) & mask; // keep only the `len` bits starting at `read_shift`
        let chunk = chunk << write_shift; // align with the destination bit offset
        let null_count = len - chunk.count_ones() as usize;
        let bytes = ceil(len + write_shift, 8);
        for (i, c) in chunk.to_le_bytes().iter().enumerate().take(bytes) {
            unsafe { *write_data.get_unchecked_mut(write_byte + i) |= c };
        }
        (null_count, len)
    }
}

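/// Copies `count` bytes (at most 8) from `data` starting at byte `offset` into a
/// zero-initialized `u64`.
///
/// # Safety
/// The caller must ensure `offset + count <= data.len()`.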
#[inline]
unsafe fn read_bytes_to_u64(data: &[u8], offset: usize, count: usize) -> u64 {
    debug_assert!(count <= 8);
    let mut tmp: u64 = 0;
    let src = data.as_ptr().add(offset);
    unsafe {
        std::ptr::copy_nonoverlapping(src, &mut tmp as *mut _ as *mut u8, count);
    }
    tmp
}

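/// Writes the 8 bytes of `chunk` to `data` starting at byte `offset` (unaligned write).
///
/// # Safety
/// The caller must ensure `offset + 8 <= data.len()`.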
#[inline]
unsafe fn write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) {
    let ptr = data.as_mut_ptr().add(offset) as *mut u64;
    ptr.write_unaligned(chunk);
}

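/// Like `write_u64_bytes`, but ORs the existing byte at `data[offset]` into the low byte
/// of `chunk` before writing, so bits already set in that first byte are preserved.
///
/// # Safety
/// The caller must ensure `offset + 8 <= data.len()`.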
#[inline]
unsafe fn or_write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) {
    let ptr = data.as_mut_ptr().add(offset);
    let chunk = chunk | (*ptr) as u64;
    (ptr as *mut u64).write_unaligned(chunk);
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::bit_util::{get_bit, set_bit, unset_bit};
    use rand::prelude::StdRng;
    use rand::{Fill, Rng, SeedableRng};
    use std::fmt::Display;

    #[test]
    fn test_set_bits_aligned() {
        SetBitsTest {
            write_data: vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            data: vec![
                0b11100111, 0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011, 0b11100111,
                0b10100101,
            ],
            offset_write: 8,
            offset_read: 0,
            len: 64,
            expected_data: vec![
                0, 0b11100111, 0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011,
                0b11100111, 0b10100101, 0,
            ],
            expected_null_count: 24,
        }
        .verify();
    }

    #[test]
    fn test_set_bits_unaligned_destination_start() {
        SetBitsTest {
            write_data: vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            data: vec![
                0b11100111, 0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011, 0b11100111,
                0b10100101,
            ],
            offset_write: 3,
            offset_read: 0,
            len: 64,
            expected_data: vec![
                0b00111000, 0b00101111, 0b11001101, 0b11011100, 0b01011110, 0b00011111, 0b00111110,
                0b00101111, 0b00000101, 0b00000000,
            ],
            expected_null_count: 24,
        }
        .verify();
    }

    #[test]
    fn test_set_bits_unaligned_destination_end() {
        SetBitsTest {
            write_data: vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            data: vec![
                0b11100111, 0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011, 0b11100111,
                0b10100101,
            ],
            offset_write: 8,
            offset_read: 0,
            len: 62,
            expected_data: vec![
                0, 0b11100111, 0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011,
                0b11100111, 0b00100101, 0,
            ],
            expected_null_count: 23,
        }
        .verify();
    }

    #[test]
    fn test_set_bits_unaligned() {
        SetBitsTest {
            write_data: vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            data: vec![
                0b11100111, 0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011, 0b11100111,
                0b10100101, 0b10011001, 0b11011011, 0b11101011, 0b11000011, 0b11100111, 0b10100101,
                0b10011001, 0b11011011, 0b11101011, 0b11000011,
            ],
            offset_write: 3,
            offset_read: 5,
            len: 95,
            expected_data: vec![
                0b01111000, 0b01101001, 0b11100110, 0b11110110, 0b11111010, 0b11110000, 0b01111001,
                0b01101001, 0b11100110, 0b11110110, 0b11111010, 0b11110000, 0b00000001,
            ],
            expected_null_count: 35,
        }
        .verify();
    }

    #[test]
    fn set_bits_fuzz() {
        let mut rng = StdRng::seed_from_u64(42);
        let mut data = SetBitsTest::new();
        for _ in 0..100 {
            data.regen(&mut rng);
            data.verify();
        }
    }

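    /// A `set_bits` test case whose expected output is computed bit by bit with
    /// `get_bit`/`set_bit`/`unset_bit` as a reference.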
    #[derive(Debug, Default)]
    struct SetBitsTest {
        /// initial contents of the destination bitmap
        write_data: Vec<u8>,
        /// source bitmap to copy bits from
        data: Vec<u8>,
        /// destination offset in bits
        offset_write: usize,
        /// source offset in bits
        offset_read: usize,
        /// number of bits to copy
        len: usize,
        /// expected destination contents after the copy
        expected_data: Vec<u8>,
        /// expected number of `0` bits copied
        expected_null_count: usize,
    }

    struct BinaryFormatter<'a>(&'a [u8]);

    impl Display for BinaryFormatter<'_> {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            for byte in self.0 {
                write!(f, "{:08b} ", byte)?;
            }
            write!(f, " ")?;
            Ok(())
        }
    }

    impl Display for SetBitsTest {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            writeln!(f, "SetBitsTest {{")?;
            writeln!(f, " write_data: {}", BinaryFormatter(&self.write_data))?;
            writeln!(f, " data: {}", BinaryFormatter(&self.data))?;
            writeln!(
                f,
                " expected_data: {}",
                BinaryFormatter(&self.expected_data)
            )?;
            writeln!(f, " offset_write: {}", self.offset_write)?;
            writeln!(f, " offset_read: {}", self.offset_read)?;
            writeln!(f, " len: {}", self.len)?;
            writeln!(f, " expected_null_count: {}", self.expected_null_count)?;
            writeln!(f, "}}")
        }
    }

    impl SetBitsTest {
        fn new() -> Self {
            Self::default()
        }

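        /// Regenerates the test case with random offsets, length, and source data,
        /// and recomputes the expected output bit by bit.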
        fn regen(&mut self, rng: &mut StdRng) {
            let len = rng.gen_range(0..=200);

            // Random bit offsets; the byte counts below are rounded up so the buffers
            // are always large enough to hold `offset + len` bits plus some extra slack.
            let offset_write_bits = rng.gen_range(0..=200);
            let offset_write_bytes = if offset_write_bits % 8 == 0 {
                offset_write_bits / 8
            } else {
                (offset_write_bits / 8) + 1
            };
            let extra_write_data_bytes = rng.gen_range(0..=5);
            let extra_read_data_bytes = rng.gen_range(0..=5);

            let offset_read_bits = rng.gen_range(0..=200);
            let offset_read_bytes = if offset_read_bits % 8 != 0 {
                (offset_read_bits / 8) + 1
            } else {
                offset_read_bits / 8
            };

            self.write_data.clear();
            self.write_data
                .resize(offset_write_bytes + len + extra_write_data_bytes, 0);

            self.offset_write = offset_write_bits;

            self.data
                .resize(offset_read_bytes + len + extra_read_data_bytes, 0);
            self.data.try_fill(rng).unwrap();
            self.offset_read = offset_read_bits;

            self.len = len;

            self.expected_data.resize(self.write_data.len(), 0);
            self.expected_data.copy_from_slice(&self.write_data);

            self.expected_null_count = 0;
            for i in 0..self.len {
                let bit = get_bit(&self.data, self.offset_read + i);
                if bit {
                    set_bit(&mut self.expected_data, self.offset_write + i);
                } else {
                    unset_bit(&mut self.expected_data, self.offset_write + i);
                    self.expected_null_count += 1;
                }
            }
        }

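        /// Runs `set_bits` on a copy of `write_data` and asserts that both the result
        /// and the returned null count match the expected values.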
        fn verify(&self) {
            let mut actual = self.write_data.to_vec();
            let null_count = set_bits(
                &mut actual,
                &self.data,
                self.offset_write,
                self.offset_read,
                self.len,
            );

            assert_eq!(actual, self.expected_data, "self: {}", self);
            assert_eq!(null_count, self.expected_null_count, "self: {}", self);
        }
    }

    #[test]
    fn test_set_upto_64bits() {
        // len >= 64, destination not byte-aligned
        let write_data: &mut [u8] = &mut [0; 9];
        let data: &[u8] = &[
            0b00000001, 0b00000001, 0b00000001, 0b00000001, 0b00000001, 0b00000001, 0b00000001,
            0b00000001, 0b00000001,
        ];
        let offset_write = 1;
        let offset_read = 0;
        let len = 65;
        let (n, len_set) =
            unsafe { set_upto_64bits(write_data, data, offset_write, offset_read, len) };
        assert_eq!(n, 55);
        assert_eq!(len_set, 63);
        assert_eq!(
            write_data,
            &[
                0b00000010, 0b00000010, 0b00000010, 0b00000010, 0b00000010, 0b00000010, 0b00000010,
                0b00000010, 0b00000000
            ]
        );

        // len == 1
        let write_data: &mut [u8] = &mut [0b00000000];
        let data: &[u8] = &[0b00000001];
        let offset_write = 1;
        let offset_read = 0;
        let len = 1;
        let (n, len_set) =
            unsafe { set_upto_64bits(write_data, data, offset_write, offset_read, len) };
        assert_eq!(n, 0);
        assert_eq!(len_set, 1);
        assert_eq!(write_data, &[0b00000010]);
    }
}