// zlib_rs/deflate.rs

1#![warn(unsafe_op_in_unsafe_fn)]
2use core::{ffi::CStr, marker::PhantomData, mem::MaybeUninit, ops::ControlFlow};
3
4use crate::{
5    adler32::adler32,
6    allocate::Allocator,
7    c_api::{gz_header, internal_state, z_checksum, z_stream},
8    crc32::{crc32, Crc32Fold},
9    read_buf::ReadBuf,
10    trace,
11    weak_slice::{WeakArrayMut, WeakSliceMut},
12    DeflateFlush, ReturnCode, ADLER32_INITIAL_VALUE, CRC32_INITIAL_VALUE, MAX_WBITS, MIN_WBITS,
13};
14
15use self::{
16    algorithm::CONFIGURATION_TABLE,
17    hash_calc::{HashCalcVariant, RollHashCalc, StandardHashCalc},
18    pending::Pending,
19    trees_tbl::STATIC_LTREE,
20    window::Window,
21};
22
23mod algorithm;
24mod compare256;
25mod hash_calc;
26mod longest_match;
27mod pending;
28mod slide_hash;
29mod trees_tbl;
30mod window;
31
// Position relative to the current window; window offsets fit in 16 bits.
pub(crate) type Pos = u16;
34
// SAFETY: This struct must have the same layout as [`z_stream`], so that casts and transmutations
// between the two can work without UB. Do not add, remove, or reorder fields; the size and
// alignment are checked by the `_S`/`_A` assertions below.
#[repr(C)]
pub struct DeflateStream<'a> {
    pub(crate) next_in: *mut crate::c_api::Bytef, // next input byte
    pub(crate) avail_in: crate::c_api::uInt,      // number of bytes available at next_in
    pub(crate) total_in: crate::c_api::z_size,    // total number of input bytes read so far
    pub(crate) next_out: *mut crate::c_api::Bytef, // next output byte
    pub(crate) avail_out: crate::c_api::uInt,     // remaining free space at next_out
    pub(crate) total_out: crate::c_api::z_size,   // total number of bytes output so far
    pub(crate) msg: *const core::ffi::c_char,     // last error message, or NULL
    pub(crate) state: &'a mut State<'a>,          // typed, non-null version of z_stream.state
    pub(crate) alloc: Allocator<'a>,              // bundles z_stream's zalloc/zfree/opaque
    pub(crate) data_type: core::ffi::c_int,       // best guess about the data type: binary or text
    pub(crate) adler: crate::c_api::z_checksum,   // adler-32 or crc-32 of the uncompressed data
    pub(crate) reserved: crate::c_api::uLong,     // kept only for layout parity with z_stream
}
52
impl<'a> DeflateStream<'a> {
    // z_stream and DeflateStream must have the same layout. Do our best to check if this is true.
    // (imperfect check, but should catch most mistakes.)
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// Casts a `*mut z_stream` to `&mut DeflateStream` if the pointer is non-null
    /// and the stream looks initialized (has an allocator and a state pointer).
    ///
    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// - `strm` satisfies the conditions of [`pointer::as_mut`]
    /// - if not `NULL`, `strm` was initialized using [`init`] or similar
    ///
    /// [`pointer::as_mut`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.as_mut
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            // Safety: ptr points to a valid value of type z_stream (if non-null)
            let stream = unsafe { strm.as_ref() }?;

            // reject streams that were never (fully) initialized
            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: DeflateStream has an equivalent layout as z_stream, and the
        // state pointer was checked to be non-null above
        unsafe { strm.cast::<DeflateStream>().as_mut() }
    }

    /// Reinterprets `self` as the C-API `z_stream` (identical layout; see assertions above).
    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        // SAFETY: a valid &mut DeflateStream is also a valid &mut z_stream
        unsafe { &mut *(self as *mut DeflateStream as *mut z_stream) }
    }

    /// Returns the number of output bytes and bits that have been generated but
    /// not yet consumed by the caller (cf. zlib's `deflatePending`).
    pub fn pending(&self) -> (usize, u8) {
        (
            self.state.bit_writer.pending.pending,
            self.state.bit_writer.bits_used,
        )
    }
}
98
/// number of elements in hash table
pub(crate) const HASH_SIZE: usize = 65536;
/// log2(HASH_SIZE): hash values are 16 bits wide
const HASH_BITS: usize = 16;

/// Maximum value for memLevel in deflateInit2
const MAX_MEM_LEVEL: i32 = 9;
/// Default memLevel (8, clamped should MAX_MEM_LEVEL ever be lowered)
const DEF_MEM_LEVEL: i32 = if MAX_MEM_LEVEL > 8 { 8 } else { MAX_MEM_LEVEL };
107
/// Compression method. DEFLATE (value 8) is the only method defined by the zlib format.
#[repr(i32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Method {
    #[default]
    Deflated = 8,
}
115
116impl TryFrom<i32> for Method {
117    type Error = ();
118
119    fn try_from(value: i32) -> Result<Self, Self::Error> {
120        match value {
121            8 => Ok(Self::Deflated),
122            _ => Err(()),
123        }
124    }
125}
126
/// Configuration for [`init`], mirroring the parameters of zlib's `deflateInit2`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub struct DeflateConfig {
    /// compression level (0..=9, or `Z_DEFAULT_COMPRESSION`)
    pub level: i32,
    /// compression method; deflate is the only supported value
    pub method: Method,
    /// window size and wrapper selection (negative = raw deflate, +16 = gzip wrapper)
    pub window_bits: i32,
    /// memory usage level (1..=9); sizes the internal literal/symbol buffers
    pub mem_level: i32,
    /// compression strategy
    pub strategy: Strategy,
}
136
#[cfg(any(test, feature = "__internal-test"))]
impl quickcheck::Arbitrary for DeflateConfig {
    /// Draws a random, always-valid deflate configuration.
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let levels: Vec<i32> = (0..=9).collect();
        let mem_levels: Vec<i32> = (1..=9).collect();

        // all valid window_bits encodings: zlib, gzip (+16), and raw (negative)
        let window_bits: Vec<i32> = (9..=15)
            .chain(9 + 16..=15 + 16)
            .chain(-15..=-9)
            .collect();

        let strategies = [
            Strategy::Default,
            Strategy::Filtered,
            Strategy::HuffmanOnly,
            Strategy::Rle,
            Strategy::Fixed,
        ];

        Self {
            level: *g.choose(&levels).unwrap(),
            method: Method::Deflated,
            window_bits: *g.choose(&window_bits).unwrap(),
            mem_level: *g.choose(&mem_levels).unwrap(),
            strategy: *g.choose(&strategies).unwrap(),
        }
    }
}
165
166impl DeflateConfig {
167    pub fn new(level: i32) -> Self {
168        Self {
169            level,
170            ..Self::default()
171        }
172    }
173}
174
impl Default for DeflateConfig {
    /// The zlib defaults: default compression (resolved to level 6 by [`init`]),
    /// zlib wrapper with the maximum window, memory level 8, default strategy.
    fn default() -> Self {
        Self {
            level: crate::c_api::Z_DEFAULT_COMPRESSION,
            method: Method::Deflated,
            window_bits: MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        }
    }
}
186
/// Initializes `stream` for compression according to `config` (zlib's `deflateInit2`).
///
/// Validates the configuration, allocates the compression [`State`] and all of its
/// buffers (window, `prev` chain, hash head table, pending output, symbol buffer),
/// and resets the stream. On any failure nothing is leaked and an error
/// [`ReturnCode`] is returned.
pub fn init(stream: &mut z_stream, config: DeflateConfig) -> ReturnCode {
    let DeflateConfig {
        mut level,
        method: _,
        mut window_bits,
        mem_level,
        strategy,
    } = config;

    /* Todo: ignore strm->next_in if we use it as window */
    stream.msg = core::ptr::null_mut();

    // For safety we must make sure that alloc and free are consistent.
    // This is a (slight) deviation from stock zlib. In this crate we pick the rust
    // allocator as the default, but `libz-rs-sys` always explicitly sets an allocator,
    // and can configure the C allocator.
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    // no allocator is configured at all: we cannot continue
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        level = 6;
    }

    // decode the window_bits encoding: negative selects raw deflate (wrap = 0),
    // values above MAX_WBITS select the gzip wrapper (wrap = 2, bias 16 removed),
    // and everything else uses the zlib wrapper (wrap = 1)
    let wrap = if window_bits < 0 {
        if window_bits < -MAX_WBITS {
            return ReturnCode::StreamError;
        }
        window_bits = -window_bits;

        0
    } else if window_bits > MAX_WBITS {
        window_bits -= 16;
        2
    } else {
        1
    };

    // validate the (decoded) parameter ranges
    if (!(1..=MAX_MEM_LEVEL).contains(&mem_level))
        || !(MIN_WBITS..=MAX_WBITS).contains(&window_bits)
        || !(0..=9).contains(&level)
        || (window_bits == 8 && wrap != 1)
    {
        return ReturnCode::StreamError;
    }

    let window_bits = if window_bits == 8 {
        9 /* until 256-byte window bug fixed */
    } else {
        window_bits as usize
    };

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };

    // allocated here to have the same order as zlib
    let Some(state_allocation) = alloc.allocate_raw::<State>() else {
        return ReturnCode::MemError;
    };

    let w_size = 1 << window_bits;
    let window = Window::new_in(&alloc, window_bits);

    let prev = alloc.allocate_slice_raw::<u16>(w_size);
    let head = alloc.allocate_raw::<[u16; HASH_SIZE]>();

    let lit_bufsize = 1 << (mem_level + 6); // 16K elements by default
    let pending = Pending::new_in(&alloc, 4 * lit_bufsize);

    // zlib-ng overlays the pending_buf and sym_buf. We cannot really do that safely
    let sym_buf = ReadBuf::new_in(&alloc, 3 * lit_bufsize);

    // if any allocation failed, clean up allocations that did succeed
    let (window, prev, head, pending, sym_buf) = match (window, prev, head, pending, sym_buf) {
        (Some(window), Some(prev), Some(head), Some(pending), Some(sym_buf)) => {
            (window, prev, head, pending, sym_buf)
        }
        (window, prev, head, pending, sym_buf) => {
            // SAFETY: these pointers/structures are discarded after deallocation.
            unsafe {
                if let Some(mut sym_buf) = sym_buf {
                    alloc.deallocate(sym_buf.as_mut_ptr(), sym_buf.capacity())
                }
                if let Some(mut pending) = pending {
                    pending.drop_in(&alloc);
                }
                if let Some(head) = head {
                    alloc.deallocate(head.as_ptr(), 1)
                }
                if let Some(prev) = prev {
                    alloc.deallocate(prev.as_ptr(), w_size)
                }
                if let Some(mut window) = window {
                    window.drop_in(&alloc);
                }

                alloc.deallocate(state_allocation.as_ptr(), 1);
            }

            return ReturnCode::MemError;
        }
    };

    // zero initialize the memory
    let prev = prev.as_ptr(); // FIXME: write_bytes is stable for NonNull since 1.80.0
    unsafe { prev.write_bytes(0, w_size) };
    let prev = unsafe { WeakSliceMut::from_raw_parts_mut(prev, w_size) };

    // zero out head's first element
    let head = head.as_ptr(); // FIXME: write_bytes is stable for NonNull since 1.80.0
    unsafe { head.write_bytes(0, 1) };
    let head = unsafe { WeakArrayMut::<u16, HASH_SIZE>::from_ptr(head) };

    let state = State {
        status: Status::Init,

        // window
        w_size,
        w_mask: w_size - 1,

        // allocated values
        window,
        prev,
        head,
        bit_writer: BitWriter::from_pending(pending),

        //
        lit_bufsize,

        //
        sym_buf,

        //
        level: level as i8, // set to zero again for testing?
        strategy,

        // these fields are not set explicitly at this point
        last_flush: 0,
        wrap,
        strstart: 0,
        block_start: 0,
        block_open: 0,
        window_size: 0,
        insert: 0,
        matches: 0,
        opt_len: 0,
        static_len: 0,
        lookahead: 0,
        ins_h: 0,
        max_chain_length: 0,
        max_lazy_match: 0,
        good_match: 0,
        nice_match: 0,

        //
        l_desc: TreeDesc::EMPTY,
        d_desc: TreeDesc::EMPTY,
        bl_desc: TreeDesc::EMPTY,

        //
        crc_fold: Crc32Fold::new(),
        gzhead: None,
        gzindex: 0,

        //
        match_start: 0,
        prev_match: 0,
        match_available: false,
        prev_length: 0,

        // just provide a valid default; gets set properly later
        hash_calc_variant: HashCalcVariant::Standard,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: 0,
    };

    unsafe { state_allocation.as_ptr().write(state) }; // FIXME: write is stable for NonNull since 1.80.0

    stream.state = state_allocation.as_ptr() as *mut internal_state;

    let Some(stream) = (unsafe { DeflateStream::from_stream_mut(stream) }) else {
        if cfg!(debug_assertions) {
            unreachable!("we should have initialized the stream properly");
        }
        return ReturnCode::StreamError;
    };

    reset(stream)
}
393
/// Dynamically changes the compression `level` and `strategy` of a live stream
/// (zlib's `deflateParams`).
///
/// If the change affects the compression function and data was already compressed,
/// the pending input is flushed first; returns `BufError` when that flush could
/// not complete (e.g. insufficient output space).
pub fn params(stream: &mut DeflateStream, level: i32, strategy: Strategy) -> ReturnCode {
    let level = if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        6
    } else {
        level
    };

    if !(0..=9).contains(&level) {
        return ReturnCode::StreamError;
    }

    let level = level as i8;

    let func = CONFIGURATION_TABLE[stream.state.level as usize].func;

    let state = &mut stream.state;

    // `last_flush == -2` means no data has been compressed yet, so the
    // parameters may change freely without flushing
    if (strategy != state.strategy || func != CONFIGURATION_TABLE[level as usize].func)
        && state.last_flush != -2
    {
        // Flush the last buffer.
        let err = deflate(stream, DeflateFlush::Block);
        if err == ReturnCode::StreamError {
            return err;
        }

        let state = &mut stream.state;

        // any unconsumed input or unprocessed window data means the flush was partial
        if stream.avail_in != 0
            || ((state.strstart as isize - state.block_start) + state.lookahead as isize) != 0
        {
            return ReturnCode::BufError;
        }
    }

    let state = &mut stream.state;

    if state.level != level {
        // leaving level 0 (stored only): the hash table is stale and must be
        // either slid into range or cleared before matching resumes
        if state.level == 0 && state.matches != 0 {
            if state.matches == 1 {
                self::slide_hash::slide_hash(state);
            } else {
                state.head.as_mut_slice().fill(0);
            }
            state.matches = 0;
        }

        lm_set_level(state, level);
    }

    state.strategy = strategy;

    ReturnCode::Ok
}
448
/// Presets the compression dictionary for `stream` (zlib's `deflateSetDictionary`).
///
/// Only valid immediately after init/reset (before any input is compressed) for
/// zlib streams, or at any point before input for raw streams; not supported for
/// gzip streams (`wrap == 2`). The dictionary is fed through the window and hash
/// machinery as if it were input, then the input bookkeeping is restored.
pub fn set_dictionary(stream: &mut DeflateStream, mut dictionary: &[u8]) -> ReturnCode {
    let state = &mut stream.state;

    let wrap = state.wrap;

    if wrap == 2 || (wrap == 1 && state.status != Status::Init) || state.lookahead != 0 {
        return ReturnCode::StreamError;
    }

    // when using zlib wrappers, compute Adler-32 for provided dictionary
    if wrap == 1 {
        stream.adler = adler32(stream.adler as u32, dictionary) as z_checksum;
    }

    // avoid computing Adler-32 in read_buf
    state.wrap = 0;

    // if dictionary would fill window, just replace the history
    if dictionary.len() >= state.window.capacity() {
        if wrap == 0 {
            // clear the hash table
            state.head.as_mut_slice().fill(0);

            state.strstart = 0;
            state.block_start = 0;
            state.insert = 0;
        } else {
            /* already empty otherwise */
        }

        // use the tail
        dictionary = &dictionary[dictionary.len() - state.w_size..];
    }

    // insert dictionary into window and hash: temporarily point the input at
    // the dictionary and run the normal fill/insert loop over it
    let avail = stream.avail_in;
    let next = stream.next_in;
    stream.avail_in = dictionary.len() as _;
    stream.next_in = dictionary.as_ptr() as *mut u8;
    fill_window(stream);

    while stream.state.lookahead >= STD_MIN_MATCH {
        let str = stream.state.strstart;
        let n = stream.state.lookahead - (STD_MIN_MATCH - 1);
        stream.state.insert_string(str, n);
        stream.state.strstart = str + n;
        stream.state.lookahead = STD_MIN_MATCH - 1;
        fill_window(stream);
    }

    let state = &mut stream.state;

    state.strstart += state.lookahead;
    state.block_start = state.strstart as _;
    state.insert = state.lookahead;
    state.lookahead = 0;
    state.prev_length = 0;
    state.match_available = false;

    // restore the state
    stream.next_in = next;
    stream.avail_in = avail;
    state.wrap = wrap;

    ReturnCode::Ok
}
515
/// Inserts `bits` bits from `value` into the output stream ahead of the
/// compressed data (zlib's `deflatePrime`).
pub fn prime(stream: &mut DeflateStream, mut bits: i32, value: i32) -> ReturnCode {
    // our logic actually supports up to 32 bits.
    debug_assert!(bits <= 16, "zlib only supports up to 16 bits here");

    let mut value64 = value as u64;

    let state = &mut stream.state;

    if bits < 0
        || bits > BitWriter::BIT_BUF_SIZE as i32
        || bits > (core::mem::size_of_val(&value) << 3) as i32
    {
        return ReturnCode::BufError;
    }

    let mut put;

    // push the bits into the bit buffer in chunks, flushing whole bytes to the
    // pending output whenever the buffer fills up
    loop {
        // number of free bits in the bit buffer, capped at the bits we still have
        put = BitWriter::BIT_BUF_SIZE - state.bit_writer.bits_used;
        let put = Ord::min(put as i32, bits);

        if state.bit_writer.bits_used == 0 {
            state.bit_writer.bit_buffer = value64;
        } else {
            state.bit_writer.bit_buffer |=
                (value64 & ((1 << put) - 1)) << state.bit_writer.bits_used;
        }

        state.bit_writer.bits_used += put as u8;
        state.bit_writer.flush_bits();
        value64 >>= put;
        bits -= put;

        if bits == 0 {
            break;
        }
    }

    ReturnCode::Ok
}
556
557pub fn copy<'a>(
558    dest: &mut MaybeUninit<DeflateStream<'a>>,
559    source: &mut DeflateStream<'a>,
560) -> ReturnCode {
561    // SAFETY: source and dest are both mutable references, so guaranteed not to overlap.
562    // dest being a reference to maybe uninitialized memory makes a copy of 1 DeflateStream valid.
563    unsafe {
564        core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1);
565    }
566
567    let alloc = &source.alloc;
568
569    // allocated here to have the same order as zlib
570    let Some(state_allocation) = alloc.allocate_raw::<State>() else {
571        return ReturnCode::MemError;
572    };
573
574    let source_state = &source.state;
575
576    let window = source_state.window.clone_in(alloc);
577
578    let prev = alloc.allocate_slice_raw::<u16>(source_state.w_size);
579    let head = alloc.allocate_raw::<[u16; HASH_SIZE]>();
580
581    let pending = source_state.bit_writer.pending.clone_in(alloc);
582    let sym_buf = source_state.sym_buf.clone_in(alloc);
583
584    // if any allocation failed, clean up allocations that did succeed
585    let (window, prev, head, pending, sym_buf) = match (window, prev, head, pending, sym_buf) {
586        (Some(window), Some(prev), Some(head), Some(pending), Some(sym_buf)) => {
587            (window, prev, head, pending, sym_buf)
588        }
589        (window, prev, head, pending, sym_buf) => {
590            // SAFETY: this access is in-bounds
591            let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
592            unsafe { core::ptr::write(field_ptr as *mut *mut State, core::ptr::null_mut()) };
593
594            // SAFETY: it is an assumpion on DeflateStream that (de)allocation does not cause UB.
595            unsafe {
596                if let Some(mut sym_buf) = sym_buf {
597                    alloc.deallocate(sym_buf.as_mut_ptr(), sym_buf.capacity())
598                }
599                if let Some(mut pending) = pending {
600                    pending.drop_in(alloc);
601                }
602                if let Some(head) = head {
603                    alloc.deallocate(head.as_ptr(), HASH_SIZE)
604                }
605                if let Some(prev) = prev {
606                    alloc.deallocate(prev.as_ptr(), source_state.w_size)
607                }
608                if let Some(mut window) = window {
609                    window.drop_in(alloc);
610                }
611
612                alloc.deallocate(state_allocation.as_ptr(), 1);
613            }
614
615            return ReturnCode::MemError;
616        }
617    };
618
619    let prev = unsafe {
620        let prev = prev.as_ptr();
621        prev.copy_from_nonoverlapping(source_state.prev.as_ptr(), source_state.prev.len());
622        WeakSliceMut::from_raw_parts_mut(prev, source_state.prev.len())
623    };
624
625    // FIXME: write_bytes is stable for NonNull since 1.80.0
626    let head = unsafe {
627        let head = head.as_ptr();
628        head.write_bytes(0, 1);
629        head.cast::<u16>().write(source_state.head.as_slice()[0]);
630        WeakArrayMut::from_ptr(head)
631    };
632
633    let mut bit_writer = BitWriter::from_pending(pending);
634    bit_writer.bits_used = source_state.bit_writer.bits_used;
635    bit_writer.bit_buffer = source_state.bit_writer.bit_buffer;
636
637    let dest_state = State {
638        status: source_state.status,
639        bit_writer,
640        last_flush: source_state.last_flush,
641        wrap: source_state.wrap,
642        strategy: source_state.strategy,
643        level: source_state.level,
644        good_match: source_state.good_match,
645        nice_match: source_state.nice_match,
646        l_desc: source_state.l_desc.clone(),
647        d_desc: source_state.d_desc.clone(),
648        bl_desc: source_state.bl_desc.clone(),
649        prev_match: source_state.prev_match,
650        match_available: source_state.match_available,
651        strstart: source_state.strstart,
652        match_start: source_state.match_start,
653        prev_length: source_state.prev_length,
654        max_chain_length: source_state.max_chain_length,
655        max_lazy_match: source_state.max_lazy_match,
656        block_start: source_state.block_start,
657        block_open: source_state.block_open,
658        window,
659        sym_buf,
660        lit_bufsize: source_state.lit_bufsize,
661        window_size: source_state.window_size,
662        matches: source_state.matches,
663        opt_len: source_state.opt_len,
664        static_len: source_state.static_len,
665        insert: source_state.insert,
666        w_size: source_state.w_size,
667        w_mask: source_state.w_mask,
668        lookahead: source_state.lookahead,
669        prev,
670        head,
671        ins_h: source_state.ins_h,
672        hash_calc_variant: source_state.hash_calc_variant,
673        crc_fold: source_state.crc_fold,
674        gzhead: None,
675        gzindex: source_state.gzindex,
676        _cache_line_0: (),
677        _cache_line_1: (),
678        _cache_line_2: (),
679        _cache_line_3: (),
680        _padding_0: source_state._padding_0,
681    };
682
683    // write the cloned state into state_ptr
684    unsafe { state_allocation.as_ptr().write(dest_state) }; // FIXME: write is stable for NonNull since 1.80.0
685
686    // insert the state_ptr into `dest`
687    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
688    unsafe { core::ptr::write(field_ptr as *mut *mut State, state_allocation.as_ptr()) };
689
690    // update the gzhead field (it contains a mutable reference so we need to be careful
691    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state.gzhead) };
692    unsafe { core::ptr::copy(&source_state.gzhead, field_ptr, 1) };
693
694    ReturnCode::Ok
695}
696
/// Frees all state owned by `stream` and invalidates its state pointer
/// (zlib's `deflateEnd`).
///
/// # Returns
///
/// - Err when deflate is not done. A common cause is insufficient output space
/// - Ok otherwise
pub fn end<'a>(stream: &'a mut DeflateStream) -> Result<&'a mut z_stream, &'a mut z_stream> {
    let status = stream.state.status;

    let alloc = stream.alloc;

    // deallocate in reverse order of allocations
    unsafe {
        // SAFETY: we make sure that these fields are not used (by invalidating the state pointer)
        stream.state.sym_buf.drop_in(&alloc);
        stream.state.bit_writer.pending.drop_in(&alloc);
        alloc.deallocate(stream.state.head.as_mut_ptr(), 1);
        if !stream.state.prev.is_empty() {
            alloc.deallocate(stream.state.prev.as_mut_ptr(), stream.state.prev.len());
        }
        stream.state.window.drop_in(&alloc);
    }

    // null the state pointer so the stream can no longer be used as initialized
    let stream = stream.as_z_stream_mut();
    let state = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    // SAFETY: `state` is not used later
    unsafe {
        alloc.deallocate(state as *mut State, 1);
    }

    match status {
        Status::Busy => Err(stream),
        _ => Ok(stream),
    }
}
731
732pub fn reset(stream: &mut DeflateStream) -> ReturnCode {
733    let ret = reset_keep(stream);
734
735    if ret == ReturnCode::Ok {
736        lm_init(stream.state);
737    }
738
739    ret
740}
741
/// Resets the stream counters and wrapper state for a new compression run while
/// keeping the configured parameters and allocations (zlib's `deflateResetKeep`).
fn reset_keep(stream: &mut DeflateStream) -> ReturnCode {
    stream.total_in = 0;
    stream.total_out = 0;
    stream.msg = core::ptr::null_mut();
    stream.data_type = crate::c_api::Z_UNKNOWN;

    let state = &mut stream.state;

    state.bit_writer.pending.reset_keep();

    // can be made negative by deflate(..., Z_FINISH);
    state.wrap = state.wrap.abs();

    state.status = match state.wrap {
        2 => Status::GZip,
        _ => Status::Init,
    };

    // gzip streams (wrap == 2) checksum with CRC-32, zlib streams with Adler-32
    stream.adler = match state.wrap {
        2 => {
            state.crc_fold = Crc32Fold::new();
            CRC32_INITIAL_VALUE as _
        }
        _ => ADLER32_INITIAL_VALUE as _,
    };

    // sentinel meaning "no deflate call has happened yet"; checked by `params`
    state.last_flush = -2;

    state.zng_tr_init();

    ReturnCode::Ok
}
774
/// (Re)initializes the longest-match matcher state for a fresh compression run:
/// clears the hash table, applies the level's tuning parameters, and zeroes all
/// match bookkeeping.
fn lm_init(state: &mut State) {
    state.window_size = 2 * state.w_size;

    // zlib uses CLEAR_HASH here
    state.head.as_mut_slice().fill(0);

    // Set the default configuration parameters:
    lm_set_level(state, state.level);

    state.strstart = 0;
    state.block_start = 0;
    state.lookahead = 0;
    state.insert = 0;
    state.prev_length = 0;
    state.match_available = false;
    state.match_start = 0;
    state.ins_h = 0;
}
793
794fn lm_set_level(state: &mut State, level: i8) {
795    state.max_lazy_match = CONFIGURATION_TABLE[level as usize].max_lazy;
796    state.good_match = CONFIGURATION_TABLE[level as usize].good_length;
797    state.nice_match = CONFIGURATION_TABLE[level as usize].nice_length;
798    state.max_chain_length = CONFIGURATION_TABLE[level as usize].max_chain;
799
800    state.hash_calc_variant = HashCalcVariant::for_max_chain_length(state.max_chain_length);
801    state.level = level;
802}
803
/// Fine-tunes deflate's internal match-search parameters (zlib's `deflateTune`).
///
/// NOTE(review): the `usize -> u16` `as` casts silently truncate values above
/// `u16::MAX`; this mirrors the permissive C API, but confirm callers only pass
/// small values.
pub fn tune(
    stream: &mut DeflateStream,
    good_length: usize,
    max_lazy: usize,
    nice_length: usize,
    max_chain: usize,
) -> ReturnCode {
    stream.state.good_match = good_length as u16;
    stream.state.max_lazy_match = max_lazy as u16;
    stream.state.nice_match = nice_length as u16;
    stream.state.max_chain_length = max_chain as u16;

    ReturnCode::Ok
}
818
/// A node in a Huffman tree, mirroring zlib's C `ct_data` struct.
///
/// Each field serves two purposes depending on the tree-building phase (like
/// the unions in the C struct); see the paired accessors in the `impl` below.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Value {
    a: u16,
    b: u16,
}
825
impl Value {
    pub(crate) const fn new(a: u16, b: u16) -> Self {
        Self { a, b }
    }

    // NOTE: `freq`/`code` intentionally alias field `a`, and `dad`/`len` alias
    // field `b` — the same overlay zlib expresses with unions in `ct_data`.

    /// frequency count (tree construction phase); aliases `code`
    pub(crate) fn freq_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    /// bit string of the code (emission phase); aliases `freq`
    pub(crate) fn code_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    /// index of the parent node (tree construction phase); aliases `len`
    pub(crate) fn dad_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    /// length of the bit string (emission phase); aliases `dad`
    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    #[inline(always)]
    pub(crate) const fn freq(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn code(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn dad(self) -> u16 {
        self.b
    }

    #[inline(always)]
    pub(crate) const fn len(self) -> u16 {
        self.b
    }
}
867
/// number of length codes, not counting the special END_BLOCK code
pub(crate) const LENGTH_CODES: usize = 29;

/// number of literal bytes 0..255
const LITERALS: usize = 256;

/// number of Literal or Length codes, including the END_BLOCK code
pub(crate) const L_CODES: usize = LITERALS + 1 + LENGTH_CODES;

/// number of distance codes
pub(crate) const D_CODES: usize = 30;

/// number of codes used to transfer the bit lengths
const BL_CODES: usize = 19;

/// maximum heap size
const HEAP_SIZE: usize = 2 * L_CODES + 1;

/// all codes must not exceed MAX_BITS bits
const MAX_BITS: usize = 15;

/// Bit length codes must not exceed MAX_BL_BITS bits
const MAX_BL_BITS: usize = 7;

/// size of the table that maps a distance to its distance code
pub(crate) const DIST_CODE_LEN: usize = 512;
893
/// Accumulates output bits in a 64-bit buffer and flushes whole bytes into the
/// `pending` output buffer.
struct BitWriter<'a> {
    pub(crate) pending: Pending<'a>, // output still pending
    pub(crate) bit_buffer: u64,
    /// number of valid (low) bits currently in `bit_buffer` (0..=64)
    pub(crate) bits_used: u8,

    /// total bit length of compressed file (NOTE: zlib-ng uses a 32-bit integer here)
    #[cfg(feature = "ZLIB_DEBUG")]
    compressed_len: usize,
    /// bit length of compressed data sent (NOTE: zlib-ng uses a 32-bit integer here)
    #[cfg(feature = "ZLIB_DEBUG")]
    bits_sent: usize,
}
906
/// Computes the Huffman encoding of a match length `lc` (the match length minus
/// `STD_MIN_MATCH`) using the literal/length tree `ltree`.
///
/// Returns `(bits, bit_count)`: the length code's bit string from the tree,
/// followed by any extra bits that encode the offset within the code's range.
#[inline]
const fn encode_len(ltree: &[Value], lc: u8) -> (u64, usize) {
    let mut lc = lc as usize;

    /* Send the length code, len is the match length - STD_MIN_MATCH */
    let code = self::trees_tbl::LENGTH_CODE[lc] as usize;
    let c = code + LITERALS + 1;
    assert!(c < L_CODES, "bad l_code");
    // send_code_trace(s, c);

    let lnode = ltree[c];
    let mut match_bits: u64 = lnode.code() as u64;
    let mut match_bits_len = lnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_LBITS[code] as usize;
    if extra != 0 {
        // append the extra bits: the distance of `lc` from the code's base length
        lc -= self::trees_tbl::BASE_LENGTH[code] as usize;
        match_bits |= (lc as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
929
/// Computes the Huffman encoding of a match distance `dist` using the distance
/// tree `dtree`.
///
/// Returns `(bits, bit_count)`: the distance code's bit string from the tree,
/// followed by any extra bits that encode the offset within the code's range.
#[inline]
const fn encode_dist(dtree: &[Value], mut dist: u16) -> (u64, usize) {
    dist -= 1; /* dist is now the match distance - 1 */
    let code = State::d_code(dist as usize) as usize;
    assert!(code < D_CODES, "bad d_code");
    // send_code_trace(s, code);

    /* Send the distance code */
    let dnode = dtree[code];
    let mut match_bits = dnode.code() as u64;
    let mut match_bits_len = dnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_DBITS[code] as usize;
    if extra != 0 {
        // append the extra bits: the distance's offset from the code's base distance
        dist -= self::trees_tbl::BASE_DIST[code];
        match_bits |= (dist as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
950
951impl<'a> BitWriter<'a> {
    /// Capacity of `bit_buffer` in bits (it is a `u64`).
    pub(crate) const BIT_BUF_SIZE: u8 = 64;
953
    /// Creates a bit writer that appends to the given `pending` output buffer,
    /// starting with an empty bit buffer.
    fn from_pending(pending: Pending<'a>) -> Self {
        Self {
            pending,
            bit_buffer: 0,
            bits_used: 0,

            #[cfg(feature = "ZLIB_DEBUG")]
            compressed_len: 0,
            #[cfg(feature = "ZLIB_DEBUG")]
            bits_sent: 0,
        }
    }
966
    /// Flushes all complete bytes from the bit buffer into the pending output,
    /// keeping the final (up to 7) leftover bits in the buffer.
    fn flush_bits(&mut self) {
        debug_assert!(self.bits_used <= 64);
        // bits to drop: bits_used rounded down to a byte boundary, computed as
        // round-up(bits_used - 7, 8) to avoid underflow when bits_used < 8;
        // always equal to 8 * keep_bytes
        let removed = self.bits_used.saturating_sub(7).next_multiple_of(8);
        let keep_bytes = self.bits_used / 8; // can never divide by zero

        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used -= removed;
        // checked_shr: when removed == 64 a plain shift would overflow; the whole
        // buffer was flushed, so it becomes 0
        self.bit_buffer = self.bit_buffer.checked_shr(removed as u32).unwrap_or(0);
    }
978
979    fn emit_align(&mut self) {
980        debug_assert!(self.bits_used <= 64);
981        let keep_bytes = self.bits_used.div_ceil(8);
982        let src = &self.bit_buffer.to_le_bytes();
983        self.pending.extend(&src[..keep_bytes as usize]);
984
985        self.bits_used = 0;
986        self.bit_buffer = 0;
987
988        self.sent_bits_align();
989    }
990
991    fn send_bits_trace(&self, _value: u64, _len: u8) {
992        trace!(" l {:>2} v {:>4x} ", _len, _value);
993    }
994
995    fn cmpr_bits_add(&mut self, _len: usize) {
996        #[cfg(feature = "ZLIB_DEBUG")]
997        {
998            self.compressed_len += _len;
999        }
1000    }
1001
1002    fn cmpr_bits_align(&mut self) {
1003        #[cfg(feature = "ZLIB_DEBUG")]
1004        {
1005            self.compressed_len = self.compressed_len.next_multiple_of(8);
1006        }
1007    }
1008
1009    fn sent_bits_add(&mut self, _len: usize) {
1010        #[cfg(feature = "ZLIB_DEBUG")]
1011        {
1012            self.bits_sent += _len;
1013        }
1014    }
1015
1016    fn sent_bits_align(&mut self) {
1017        #[cfg(feature = "ZLIB_DEBUG")]
1018        {
1019            self.bits_sent = self.bits_sent.next_multiple_of(8);
1020        }
1021    }
1022
1023    #[inline(always)]
1024    fn send_bits(&mut self, val: u64, len: u8) {
1025        debug_assert!(len <= 64);
1026        debug_assert!(self.bits_used <= 64);
1027
1028        let total_bits = len + self.bits_used;
1029
1030        self.send_bits_trace(val, len);
1031        self.sent_bits_add(len as usize);
1032
1033        if total_bits < Self::BIT_BUF_SIZE {
1034            self.bit_buffer |= val << self.bits_used;
1035            self.bits_used = total_bits;
1036        } else {
1037            self.send_bits_overflow(val, total_bits);
1038        }
1039    }
1040
1041    fn send_bits_overflow(&mut self, val: u64, total_bits: u8) {
1042        if self.bits_used == Self::BIT_BUF_SIZE {
1043            self.pending.extend(&self.bit_buffer.to_le_bytes());
1044            self.bit_buffer = val;
1045            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
1046        } else {
1047            self.bit_buffer |= val << self.bits_used;
1048            self.pending.extend(&self.bit_buffer.to_le_bytes());
1049            self.bit_buffer = val >> (Self::BIT_BUF_SIZE - self.bits_used);
1050            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
1051        }
1052    }
1053
1054    fn send_code(&mut self, code: usize, tree: &[Value]) {
1055        let node = tree[code];
1056        self.send_bits(node.code() as u64, node.len() as u8)
1057    }
1058
1059    /// Send one empty static block to give enough lookahead for inflate.
1060    /// This takes 10 bits, of which 7 may remain in the bit buffer.
1061    pub fn align(&mut self) {
1062        self.emit_tree(BlockType::StaticTrees, false);
1063        self.emit_end_block(&STATIC_LTREE, false);
1064        self.flush_bits();
1065    }
1066
1067    pub(crate) fn emit_tree(&mut self, block_type: BlockType, is_last_block: bool) {
1068        let header_bits = (block_type as u64) << 1 | (is_last_block as u64);
1069        self.send_bits(header_bits, 3);
1070        trace!("\n--- Emit Tree: Last: {}\n", is_last_block as u8);
1071    }
1072
1073    pub(crate) fn emit_end_block_and_align(&mut self, ltree: &[Value], is_last_block: bool) {
1074        self.emit_end_block(ltree, is_last_block);
1075
1076        if is_last_block {
1077            self.emit_align();
1078        }
1079    }
1080
1081    fn emit_end_block(&mut self, ltree: &[Value], _is_last_block: bool) {
1082        const END_BLOCK: usize = 256;
1083        self.send_code(END_BLOCK, ltree);
1084
1085        trace!(
1086            "\n+++ Emit End Block: Last: {} Pending: {} Total Out: {}\n",
1087            _is_last_block as u8,
1088            self.pending.pending().len(),
1089            "<unknown>"
1090        );
1091    }
1092
1093    pub(crate) fn emit_lit(&mut self, ltree: &[Value], c: u8) -> u16 {
1094        self.send_code(c as usize, ltree);
1095
1096        #[cfg(feature = "ZLIB_DEBUG")]
1097        if let Some(c) = char::from_u32(c as u32) {
1098            if isgraph(c as u8) {
1099                trace!(" '{}' ", c);
1100            }
1101        }
1102
1103        ltree[c as usize].len()
1104    }
1105
1106    pub(crate) fn emit_dist(
1107        &mut self,
1108        ltree: &[Value],
1109        dtree: &[Value],
1110        lc: u8,
1111        dist: u16,
1112    ) -> usize {
1113        let (mut match_bits, mut match_bits_len) = encode_len(ltree, lc);
1114
1115        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);
1116
1117        match_bits |= dist_match_bits << match_bits_len;
1118        match_bits_len += dist_match_bits_len;
1119
1120        self.send_bits(match_bits, match_bits_len as u8);
1121
1122        match_bits_len
1123    }
1124
1125    pub(crate) fn emit_dist_static(&mut self, lc: u8, dist: u16) -> usize {
1126        let precomputed_len = trees_tbl::STATIC_LTREE_ENCODINGS[lc as usize];
1127        let mut match_bits = precomputed_len.code() as u64;
1128        let mut match_bits_len = precomputed_len.len() as usize;
1129
1130        let dtree = self::trees_tbl::STATIC_DTREE.as_slice();
1131        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);
1132
1133        match_bits |= dist_match_bits << match_bits_len;
1134        match_bits_len += dist_match_bits_len;
1135
1136        self.send_bits(match_bits, match_bits_len as u8);
1137
1138        match_bits_len
1139    }
1140
1141    fn compress_block_help(&mut self, sym_buf: &[u8], ltree: &[Value], dtree: &[Value]) {
1142        for chunk in sym_buf.chunks_exact(3) {
1143            let [dist_low, dist_high, lc] = *chunk else {
1144                unreachable!("out of bound access on the symbol buffer");
1145            };
1146
1147            match u16::from_le_bytes([dist_low, dist_high]) {
1148                0 => self.emit_lit(ltree, lc) as usize,
1149                dist => self.emit_dist(ltree, dtree, lc, dist),
1150            };
1151        }
1152
1153        self.emit_end_block(ltree, false)
1154    }
1155
1156    fn send_tree(&mut self, tree: &[Value], bl_tree: &[Value], max_code: usize) {
1157        /* tree: the tree to be scanned */
1158        /* max_code and its largest code of non zero frequency */
1159        let mut prevlen: isize = -1; /* last emitted length */
1160        let mut curlen; /* length of current code */
1161        let mut nextlen = tree[0].len(); /* length of next code */
1162        let mut count = 0; /* repeat count of the current code */
1163        let mut max_count = 7; /* max repeat count */
1164        let mut min_count = 4; /* min repeat count */
1165
1166        /* tree[max_code+1].Len = -1; */
1167        /* guard already set */
1168        if nextlen == 0 {
1169            max_count = 138;
1170            min_count = 3;
1171        }
1172
1173        for n in 0..=max_code {
1174            curlen = nextlen;
1175            nextlen = tree[n + 1].len();
1176            count += 1;
1177            if count < max_count && curlen == nextlen {
1178                continue;
1179            } else if count < min_count {
1180                loop {
1181                    self.send_code(curlen as usize, bl_tree);
1182
1183                    count -= 1;
1184                    if count == 0 {
1185                        break;
1186                    }
1187                }
1188            } else if curlen != 0 {
1189                if curlen as isize != prevlen {
1190                    self.send_code(curlen as usize, bl_tree);
1191                    count -= 1;
1192                }
1193                assert!((3..=6).contains(&count), " 3_6?");
1194                self.send_code(REP_3_6, bl_tree);
1195                self.send_bits(count - 3, 2);
1196            } else if count <= 10 {
1197                self.send_code(REPZ_3_10, bl_tree);
1198                self.send_bits(count - 3, 3);
1199            } else {
1200                self.send_code(REPZ_11_138, bl_tree);
1201                self.send_bits(count - 11, 7);
1202            }
1203
1204            count = 0;
1205            prevlen = curlen as isize;
1206
1207            if nextlen == 0 {
1208                max_count = 138;
1209                min_count = 3;
1210            } else if curlen == nextlen {
1211                max_count = 6;
1212                min_count = 3;
1213            } else {
1214                max_count = 7;
1215                min_count = 4;
1216            }
1217        }
1218    }
1219}
1220
#[repr(C, align(64))]
pub(crate) struct State<'a> {
    /// Current stage of the deflate state machine.
    status: Status,

    last_flush: i8, /* value of flush param for previous deflate call */

    pub(crate) wrap: i8, /* bit 0 true for zlib, bit 1 true for gzip */

    /// Compression strategy (zlib's Z_DEFAULT_STRATEGY .. Z_FIXED).
    pub(crate) strategy: Strategy,
    /// Compression level.
    pub(crate) level: i8,

    /// Whether or not a block is currently open for the QUICK deflation scheme.
    /// 0 if the block is closed, 1 if there is an active block, or 2 if there
    /// is an active block and it is the last block.
    pub(crate) block_open: u8,

    /// Which hash function implementation is used for the match-finder hash table.
    pub(crate) hash_calc_variant: HashCalcVariant,

    pub(crate) match_available: bool, /* set if previous match exists */

    /// Use a faster search when the previous match is longer than this
    pub(crate) good_match: u16,

    /// Stop searching when current match exceeds this
    pub(crate) nice_match: u16,

    pub(crate) match_start: Pos,      /* start of matching string */
    pub(crate) prev_match: Pos,       /* previous match */
    pub(crate) strstart: usize,       /* start of string to insert */

    /// Sliding window holding the most recent input bytes.
    pub(crate) window: Window<'a>,
    pub(crate) w_size: usize,    /* LZ77 window size (32K by default) */
    pub(crate) w_mask: usize,    /* w_size - 1 */

    // Zero-sized marker making the intended cache-line boundary visible in the source;
    // occupies no space (layout tuning only, together with the align(64) on the struct).
    _cache_line_0: (),

    /// prev[N], where N is an offset in the current window, contains the offset in the window
    /// of the previous 4-byte sequence that hashes to the same value as the 4-byte sequence
    /// starting at N. Together with head, prev forms a chained hash table that can be used
    /// to find earlier strings in the window that are potential matches for new input being
    /// deflated.
    pub(crate) prev: WeakSliceMut<'a, u16>,
    /// head[H] contains the offset of the last 4-character sequence seen so far in
    /// the current window that hashes to H (as calculated using the hash_calc_variant).
    pub(crate) head: WeakArrayMut<'a, u16, HASH_SIZE>,

    /// Length of the best match at previous step. Matches not greater than this
    /// are discarded. This is used in the lazy match evaluation.
    pub(crate) prev_length: u16,

    /// To speed up deflation, hash chains are never searched beyond this length.
    /// A higher limit improves compression ratio but degrades the speed.
    pub(crate) max_chain_length: u16,

    // TODO untangle this mess! zlib uses the same field differently based on compression level
    // we should just have 2 fields for clarity!
    //
    // Insert new strings in the hash table only if the match length is not
    // greater than this length. This saves time but degrades compression.
    // max_insert_length is used only for compression levels <= 3.
    // define max_insert_length  max_lazy_match
    /// Attempt to find a better match only when the current match is strictly smaller
    /// than this value. This mechanism is used only for compression levels >= 4.
    pub(crate) max_lazy_match: u16,

    /// number of string matches in current block
    /// NOTE: this is a saturating 8-bit counter, to help keep the struct compact. The code that
    /// makes decisions based on this field only cares whether the count is greater than 2, so
    /// an 8-bit counter is sufficient.
    pub(crate) matches: u8,

    /// Window position at the beginning of the current output block. Gets
    /// negative when the window is moved backwards.
    pub(crate) block_start: isize,

    /// Symbol buffer for the current block: 3 bytes per symbol (little-endian
    /// distance, then the literal/length byte; distance 0 marks a literal).
    pub(crate) sym_buf: ReadBuf<'a>,

    // Zero-sized cache-line boundary marker; occupies no space.
    _cache_line_1: (),

    /// Size of match buffer for literals/lengths.  There are 4 reasons for
    /// limiting lit_bufsize to 64K:
    ///   - frequencies can be kept in 16 bit counters
    ///   - if compression is not successful for the first block, all input
    ///     data is still in the window so we can still emit a stored block even
    ///     when input comes from standard input.  (This can also be done for
    ///     all blocks if lit_bufsize is not greater than 32K.)
    ///   - if compression is not successful for a file smaller than 64K, we can
    ///     even emit a stored file instead of a stored block (saving 5 bytes).
    ///     This is applicable only for zip (not gzip or zlib).
    ///   - creating new Huffman trees less frequently may not provide fast
    ///     adaptation to changes in the input data statistics. (Take for
    ///     example a binary file with poorly compressible code followed by
    ///     a highly compressible string table.) Smaller buffer sizes give
    ///     fast adaptation but have of course the overhead of transmitting
    ///     trees more frequently.
    ///   - I can't count above 4
    lit_bufsize: usize,

    /// Actual size of window: 2*w_size, except when the user input buffer is directly used as sliding window.
    pub(crate) window_size: usize,

    /// Bit-level writer; owns the pending output buffer.
    bit_writer: BitWriter<'a>,

    // Zero-sized cache-line boundary marker; occupies no space.
    _cache_line_2: (),

    /// bit length of current block with optimal trees
    opt_len: usize,
    /// bit length of current block with static trees
    static_len: usize,

    /// bytes at end of window left to insert
    pub(crate) insert: usize,

    pub(crate) lookahead: usize, /* number of valid bytes ahead in window */

    ///  hash index of string to be inserted
    pub(crate) ins_h: u32,

    /// gzip header to write out, if the user supplied one.
    gzhead: Option<&'a mut gz_header>,
    // NOTE(review): presumably the progress index while writing gzip header sections
    // (extra/name/comment) — confirm against the header-writing code.
    gzindex: usize,

    // Explicit padding so the next cache-line marker lands on a 64-byte boundary
    // (layout tuning only).
    _padding_0: usize,

    // Zero-sized cache-line boundary marker; occupies no space.
    _cache_line_3: (),

    /// Running folded crc32 state, used for gzip streams (wrap == 2).
    crc_fold: crate::crc32::Crc32Fold,

    l_desc: TreeDesc<HEAP_SIZE>,             /* literal and length tree */
    d_desc: TreeDesc<{ 2 * D_CODES + 1 }>,   /* distance tree */
    bl_desc: TreeDesc<{ 2 * BL_CODES + 1 }>, /* Huffman tree for bit lengths */
}
1352
/// Compression strategy; the discriminants mirror zlib's `Z_DEFAULT_STRATEGY`
/// through `Z_FIXED` constants.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Strategy {
    #[default]
    Default = 0,
    Filtered = 1,
    HuffmanOnly = 2,
    Rle = 3,
    Fixed = 4,
}
1363
1364impl TryFrom<i32> for Strategy {
1365    type Error = ();
1366
1367    fn try_from(value: i32) -> Result<Self, Self::Error> {
1368        match value {
1369            0 => Ok(Strategy::Default),
1370            1 => Ok(Strategy::Filtered),
1371            2 => Ok(Strategy::HuffmanOnly),
1372            3 => Ok(Strategy::Rle),
1373            4 => Ok(Strategy::Fixed),
1374            _ => Err(()),
1375        }
1376    }
1377}
1378
/// Detected kind of input data (see [`State::detect_data_type`]); the discriminants
/// match zlib's `Z_BINARY` / `Z_TEXT` / `Z_UNKNOWN` constants.
#[derive(Debug, PartialEq, Eq)]
enum DataType {
    Binary = 0,
    Text = 1,
    Unknown = 2,
}
1385
impl<'a> State<'a> {
    pub const BIT_BUF_SIZE: u8 = BitWriter::BIT_BUF_SIZE;

    // log2(w_size)  (in the range MIN_WBITS..=MAX_WBITS)
    pub(crate) fn w_bits(&self) -> u32 {
        self.w_size.trailing_zeros()
    }

    /// Farthest back a match may start; keeps `MIN_LOOKAHEAD` bytes of slack so the
    /// window never has to be refilled in the middle of a match.
    pub(crate) fn max_dist(&self) -> usize {
        self.w_size - MIN_LOOKAHEAD
    }

    // TODO untangle this mess! zlib uses the same field differently based on compression level
    // we should just have 2 fields for clarity!
    pub(crate) fn max_insert_length(&self) -> usize {
        self.max_lazy_match as usize
    }

    /// Total size of the pending buf. But because `pending` shares memory with `sym_buf`, this is
    /// not the number of bytes that are actually in `pending`!
    pub(crate) fn pending_buf_size(&self) -> usize {
        self.lit_bufsize * 4
    }

    /// Rolls `val` into the running hash `h` using the configured hash variant.
    #[inline(always)]
    pub(crate) fn update_hash(&self, h: u32, val: u32) -> u32 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::update_hash(h, val),
            HashCalcVariant::Roll => RollHashCalc::update_hash(h, val),
        }
    }

    /// Inserts the string starting at window offset `string` into the hash chains,
    /// returning the previous head of its chain.
    #[inline(always)]
    pub(crate) fn quick_insert_string(&mut self, string: usize) -> u16 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::quick_insert_string(self, string),
            HashCalcVariant::Roll => RollHashCalc::quick_insert_string(self, string),
        }
    }

    /// Inserts `count` consecutive strings starting at window offset `string` into the
    /// hash chains.
    #[inline(always)]
    pub(crate) fn insert_string(&mut self, string: usize, count: usize) {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::insert_string(self, string, count),
            HashCalcVariant::Roll => RollHashCalc::insert_string(self, string, count),
        }
    }

    /// Records a literal byte in the symbol buffer and its frequency in the literal
    /// tree; returns `true` if the current block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_lit(&mut self, unmatched: u8) -> bool {
        Self::tally_lit_help(&mut self.sym_buf, &mut self.l_desc, unmatched)
    }

    /// Free-function form of [`Self::tally_lit`], usable when `self` is already
    /// partially borrowed. Returns `true` if the current block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_lit_help(
        sym_buf: &mut ReadBuf<'a>,
        l_desc: &mut TreeDesc<HEAP_SIZE>,
        unmatched: u8,
    ) -> bool {
        sym_buf.push_lit(unmatched);

        *l_desc.dyn_tree[unmatched as usize].freq_mut() += 1;

        assert!(
            unmatched as usize <= STD_MAX_MATCH - STD_MIN_MATCH,
            "zng_tr_tally: bad literal"
        );

        // signal that the current block should be flushed
        sym_buf.len() == sym_buf.capacity() - 3
    }

    /// Maps a match distance (minus 1) to its distance code. Distances below 256 index
    /// the table directly; larger distances are bucketed via `256 + (dist >> 7)`.
    const fn d_code(dist: usize) -> u8 {
        let index = if dist < 256 { dist } else { 256 + (dist >> 7) };
        self::trees_tbl::DIST_CODE[index]
    }

    /// Records a length/distance match in the symbol buffer and updates the length and
    /// distance tree frequencies; returns `true` if the current block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_dist(&mut self, mut dist: usize, len: usize) -> bool {
        self.sym_buf.push_dist(dist as u16, len as u8);

        // `matches` is a saturating 8-bit counter; only "greater than 2" matters.
        self.matches = self.matches.saturating_add(1);
        dist -= 1;

        assert!(
            dist < self.max_dist() && Self::d_code(dist) < D_CODES as u8,
            "tally_dist: bad match"
        );

        let index = self::trees_tbl::LENGTH_CODE[len] as usize + LITERALS + 1;
        *self.l_desc.dyn_tree[index].freq_mut() += 1;

        *self.d_desc.dyn_tree[Self::d_code(dist) as usize].freq_mut() += 1;

        // signal that the current block should be flushed
        self.sym_buf.len() == self.sym_buf.capacity() - 3
    }

    /// Classifies the block's data as text or binary from the literal frequencies.
    fn detect_data_type(dyn_tree: &[Value]) -> DataType {
        // set bits 0..6, 14..25, and 28..31
        // 0xf3ffc07f = binary 11110011111111111100000001111111
        const NON_TEXT: u64 = 0xf3ffc07f;
        let mut mask = NON_TEXT;

        /* Check for non-textual bytes. */
        for value in &dyn_tree[0..32] {
            if (mask & 1) != 0 && value.freq() != 0 {
                return DataType::Binary;
            }

            mask >>= 1;
        }

        /* Check for textual bytes. */
        if dyn_tree[9].freq() != 0 || dyn_tree[10].freq() != 0 || dyn_tree[13].freq() != 0 {
            return DataType::Text;
        }

        if dyn_tree[32..LITERALS].iter().any(|v| v.freq() != 0) {
            return DataType::Text;
        }

        // there are no explicit text or non-text bytes. The stream is either empty or has only
        // tolerated bytes
        DataType::Binary
    }

    /// Emits the symbol buffer using the static Huffman trees and ends the block.
    fn compress_block_static_trees(&mut self) {
        let ltree = self::trees_tbl::STATIC_LTREE.as_slice();
        for chunk in self.sym_buf.filled().chunks_exact(3) {
            let [dist_low, dist_high, lc] = *chunk else {
                unreachable!("out of bound access on the symbol buffer");
            };

            // A distance of 0 marks a literal; anything else is a match.
            match u16::from_le_bytes([dist_low, dist_high]) {
                0 => self.bit_writer.emit_lit(ltree, lc) as usize,
                dist => self.bit_writer.emit_dist_static(lc, dist),
            };
        }

        self.bit_writer.emit_end_block(ltree, false)
    }

    /// Emits the symbol buffer using the block's dynamic Huffman trees and ends the block.
    fn compress_block_dynamic_trees(&mut self) {
        self.bit_writer.compress_block_help(
            self.sym_buf.filled(),
            &self.l_desc.dyn_tree,
            &self.d_desc.dyn_tree,
        );
    }

    /// Computes the 2-byte zlib stream header (CMF and FLG).
    fn header(&self) -> u16 {
        // preset dictionary flag in zlib header
        const PRESET_DICT: u16 = 0x20;

        // The deflate compression method (the only one supported in this version)
        const Z_DEFLATED: u16 = 8;

        let dict = match self.strstart {
            0 => 0,
            _ => PRESET_DICT,
        };

        // high byte: method + window size; low byte: level flags + dict flag
        let h =
            (Z_DEFLATED + ((self.w_bits() as u16 - 8) << 4)) << 8 | (self.level_flags() << 6) | dict;

        // the FCHECK adjustment: make the 16-bit header a multiple of 31
        h + 31 - (h % 31)
    }

    /// The 2-bit FLEVEL field of the zlib header, derived from level and strategy.
    fn level_flags(&self) -> u16 {
        if self.strategy >= Strategy::HuffmanOnly || self.level < 2 {
            0
        } else if self.level < 6 {
            1
        } else if self.level == 6 {
            2
        } else {
            3
        }
    }

    /// Initializes the tree descriptors and the bit writer for a new stream.
    fn zng_tr_init(&mut self) {
        self.l_desc.stat_desc = &StaticTreeDesc::L;

        self.d_desc.stat_desc = &StaticTreeDesc::D;

        self.bl_desc.stat_desc = &StaticTreeDesc::BL;

        self.bit_writer.bit_buffer = 0;
        self.bit_writer.bits_used = 0;

        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bit_writer.compressed_len = 0;
            self.bit_writer.bits_sent = 0;
        }

        // Initialize the first block of the first file:
        self.init_block();
    }

    /// initializes a new block
    fn init_block(&mut self) {
        // Initialize the trees.
        // TODO would a memset also work here?

        for value in &mut self.l_desc.dyn_tree[..L_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.d_desc.dyn_tree[..D_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.bl_desc.dyn_tree[..BL_CODES] {
            *value.freq_mut() = 0;
        }

        // end of block literal code
        const END_BLOCK: usize = 256;

        // every block ends with the end-of-block symbol, so its frequency starts at 1
        *self.l_desc.dyn_tree[END_BLOCK].freq_mut() = 1;
        self.opt_len = 0;
        self.static_len = 0;
        self.sym_buf.clear();
        self.matches = 0;
    }
}
1614
/// Stage of the deflate state machine.
///
/// `GZip` through `Hcrc` are presumably traversed while writing the sections of a
/// gzip header (basic header, extra field, name, comment, header crc) — confirm
/// against the header-writing code. `Busy` is the normal compression state and
/// `Finish` the final one.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    Init = 1,

    GZip = 4,
    Extra = 5,
    Name = 6,
    Comment = 7,
    Hcrc = 8,

    Busy = 2,
    Finish = 3,
}
1629
/// Ranks flush values for comparison so that Z_BLOCK (5) sorts between
/// Z_NO_FLUSH (0) and Z_PARTIAL_FLUSH (1), while all other flush values
/// keep their relative order.
const fn rank_flush(f: i8) -> i8 {
    let doubled = f * 2;
    if f > 4 {
        doubled - 9
    } else {
        doubled
    }
}
1634
/// Outcome of one deflate block-processing step; mirrors zlib's internal
/// `block_state` values.
#[derive(Debug)]
pub(crate) enum BlockState {
    /// block not completed, need more input or more output
    NeedMore = 0,
    /// block flush performed
    BlockDone = 1,
    /// finish started, need only more output at next deflate
    FinishStarted = 2,
    /// finish done, accept no more input or output
    FinishDone = 3,
}
1646
/// Maximum stored block length in deflate format (not including the 5-byte header);
/// this is `u16::MAX`, because the header's LEN field is 16 bits.
pub(crate) const MAX_STORED: usize = 65535;
1649
1650pub(crate) fn read_buf_window(stream: &mut DeflateStream, offset: usize, size: usize) -> usize {
1651    let len = Ord::min(stream.avail_in as usize, size);
1652
1653    if len == 0 {
1654        return 0;
1655    }
1656
1657    stream.avail_in -= len as u32;
1658
1659    if stream.state.wrap == 2 {
1660        // we likely cannot fuse the crc32 and the copy here because the input can be changed by
1661        // a concurrent thread. Therefore it cannot be converted into a slice!
1662        let window = &mut stream.state.window;
1663        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1664        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1665
1666        let data = &stream.state.window.filled()[offset..][..len];
1667        stream.state.crc_fold.fold(data, CRC32_INITIAL_VALUE);
1668    } else if stream.state.wrap == 1 {
1669        // we likely cannot fuse the adler32 and the copy here because the input can be changed by
1670        // a concurrent thread. Therefore it cannot be converted into a slice!
1671        let window = &mut stream.state.window;
1672        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1673        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1674
1675        let data = &stream.state.window.filled()[offset..][..len];
1676        stream.adler = adler32(stream.adler as u32, data) as _;
1677    } else {
1678        let window = &mut stream.state.window;
1679        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1680        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1681    }
1682
1683    stream.next_in = stream.next_in.wrapping_add(len);
1684    stream.total_in += len as crate::c_api::z_size;
1685
1686    len
1687}
1688
/// The 2-bit BTYPE field of a deflate block header (RFC 1951).
pub(crate) enum BlockType {
    StoredBlock = 0,
    StaticTrees = 1,
    DynamicTrees = 2,
}
1694
1695pub(crate) fn zng_tr_stored_block(
1696    state: &mut State,
1697    window_range: core::ops::Range<usize>,
1698    is_last: bool,
1699) {
1700    // send block type
1701    state.bit_writer.emit_tree(BlockType::StoredBlock, is_last);
1702
1703    // align on byte boundary
1704    state.bit_writer.emit_align();
1705
1706    state.bit_writer.cmpr_bits_align();
1707
1708    let input_block: &[u8] = &state.window.filled()[window_range];
1709    let stored_len = input_block.len() as u16;
1710
1711    state.bit_writer.pending.extend(&stored_len.to_le_bytes());
1712    state
1713        .bit_writer
1714        .pending
1715        .extend(&(!stored_len).to_le_bytes());
1716
1717    state.bit_writer.cmpr_bits_add(32);
1718    state.bit_writer.sent_bits_add(32);
1719    if stored_len > 0 {
1720        state.bit_writer.pending.extend(input_block);
1721        state.bit_writer.cmpr_bits_add((stored_len << 3) as usize);
1722        state.bit_writer.sent_bits_add((stored_len << 3) as usize);
1723    }
1724}
1725
/// The minimum match length mandated by the deflate standard
pub(crate) const STD_MIN_MATCH: usize = 3;
/// The maximum match length mandated by the deflate standard
pub(crate) const STD_MAX_MATCH: usize = 258;

/// The minimum wanted match length, affects deflate_quick, deflate_fast, deflate_medium and deflate_slow
pub(crate) const WANT_MIN_MATCH: usize = 4;

/// Minimum amount of lookahead needed so that a maximum-length match starting at the
/// current position can be emitted without refilling the window mid-match.
pub(crate) const MIN_LOOKAHEAD: usize = STD_MAX_MATCH + STD_MIN_MATCH + 1;
1735
/// Refills the window with new input while the lookahead is insufficient, sliding the
/// window (and the hash chains) down by `w_size` when the upper half is in use.
#[inline]
pub(crate) fn fill_window(stream: &mut DeflateStream) {
    debug_assert!(stream.state.lookahead < MIN_LOOKAHEAD);

    let wsize = stream.state.w_size;

    loop {
        let state = &mut *stream.state;
        let mut more = state.window_size - state.lookahead - state.strstart;

        // If the window is almost full and there is insufficient lookahead,
        // move the upper half to the lower one to make room in the upper half.
        if state.strstart >= wsize + state.max_dist() {
            // shift the window to the left
            let (old, new) = state.window.filled_mut()[..2 * wsize].split_at_mut(wsize);
            old.copy_from_slice(new);

            // If the previous match start fell out of the window (position 0 after the
            // slide), the previous match length is no longer valid either.
            state.match_start = state.match_start.saturating_sub(wsize as u16);
            if state.match_start == 0 {
                state.prev_length = 0;
            }

            state.strstart -= wsize; /* we now have strstart >= MAX_DIST */
            state.block_start -= wsize as isize;
            state.insert = Ord::min(state.insert, state.strstart);

            // Rebase all hash-chain offsets by -wsize.
            self::slide_hash::slide_hash(state);

            more += wsize;
        }

        if stream.avail_in == 0 {
            break;
        }

        // If there was no sliding:
        //    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
        //    more == window_size - lookahead - strstart
        // => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
        // => more >= window_size - 2*WSIZE + 2
        // In the BIG_MEM or MMAP case (not yet supported),
        //   window_size == input_size + MIN_LOOKAHEAD  &&
        //   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
        // Otherwise, window_size == 2*WSIZE so more >= 2.
        // If there was sliding, more >= WSIZE. So in all cases, more >= 2.
        assert!(more >= 2, "more < 2");

        let n = read_buf_window(stream, stream.state.strstart + stream.state.lookahead, more);

        let state = &mut *stream.state;
        state.lookahead += n;

        // Initialize the hash value now that we have some input:
        if state.lookahead + state.insert >= STD_MIN_MATCH {
            let string = state.strstart - state.insert;
            if state.max_chain_length > 1024 {
                let v0 = state.window.filled()[string] as u32;
                let v1 = state.window.filled()[string + 1] as u32;
                state.ins_h = state.update_hash(v0, v1);
            } else if string >= 1 {
                state.quick_insert_string(string + 2 - STD_MIN_MATCH);
            }
            let mut count = state.insert;
            if state.lookahead == 1 {
                count -= 1;
            }
            if count > 0 {
                state.insert_string(string, count);
                state.insert -= count;
            }
        }

        // If the whole input has less than STD_MIN_MATCH bytes, ins_h is garbage,
        // but this is not important since only literal bytes will be emitted.

        if !(stream.state.lookahead < MIN_LOOKAHEAD && stream.avail_in != 0) {
            break;
        }
    }

    assert!(
        stream.state.strstart <= stream.state.window_size - MIN_LOOKAHEAD,
        "not enough room for search"
    );
}
1821
/// Static parameters for one of the Huffman code families used by deflate
/// (literal/length, distance, or bit-length codes).
pub(crate) struct StaticTreeDesc {
    /// static tree or NULL
    pub(crate) static_tree: &'static [Value],
    /// extra bits for each code or NULL
    extra_bits: &'static [u8],
    /// base index for extra_bits
    extra_base: usize,
    /// max number of elements in the tree
    elems: usize,
    /// max bit length for the codes
    max_length: u16,
}
1834
impl StaticTreeDesc {
    /// Placeholder descriptor with no tree and no extra bits.
    const EMPTY: Self = Self {
        static_tree: &[],
        extra_bits: &[],
        extra_base: 0,
        elems: 0,
        max_length: 0,
    };

    /// extra bits for each length code
    const EXTRA_LBITS: [u8; LENGTH_CODES] = [
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0,
    ];

    /// extra bits for each distance code
    const EXTRA_DBITS: [u8; D_CODES] = [
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12,
        13, 13,
    ];

    /// extra bits for each bit length code
    const EXTRA_BLBITS: [u8; BL_CODES] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];

    /// The lengths of the bit length codes are sent in order of decreasing
    /// probability, to avoid transmitting the lengths for unused bit length codes.
    const BL_ORDER: [u8; BL_CODES] = [
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
    ];

    /// Descriptor for the literal/length codes.
    pub(crate) const L: Self = Self {
        static_tree: &self::trees_tbl::STATIC_LTREE,
        extra_bits: &Self::EXTRA_LBITS,
        extra_base: LITERALS + 1,
        elems: L_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Descriptor for the distance codes.
    pub(crate) const D: Self = Self {
        static_tree: &self::trees_tbl::STATIC_DTREE,
        extra_bits: &Self::EXTRA_DBITS,
        extra_base: 0,
        elems: D_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Descriptor for the bit-length codes (which have no static tree).
    pub(crate) const BL: Self = Self {
        static_tree: &[],
        extra_bits: &Self::EXTRA_BLBITS,
        extra_base: 0,
        elems: BL_CODES,
        max_length: MAX_BL_BITS as u16,
    };
}
1888
/// A dynamic Huffman tree under construction, together with the static
/// description that constrains it.
#[derive(Clone)]
pub(crate) struct TreeDesc<const N: usize> {
    /// the dynamic tree: frequencies on input; lengths and codes after `build_tree`
    dyn_tree: [Value; N],
    /// largest code with non-zero frequency (set by `build_tree`)
    max_code: usize,
    /// the corresponding static tree description
    stat_desc: &'static StaticTreeDesc,
}

impl<const N: usize> TreeDesc<N> {
    /// An all-zero descriptor; used as the initial value and as a placeholder
    /// when a descriptor is temporarily moved out of `State`.
    const EMPTY: Self = Self {
        dyn_tree: [Value::new(0, 0); N],
        max_code: 0,
        stat_desc: &StaticTreeDesc::EMPTY,
    };
}
1903
/// Builds one Huffman tree for `desc`: converts the symbol frequencies in
/// `desc.dyn_tree` into optimal code lengths and then into the actual bit
/// codes.
///
/// On entry, the `freq` fields of `desc.dyn_tree` hold the symbol counts. On
/// exit, the `len` and `code` fields are filled in, `desc.max_code` holds the
/// largest symbol with non-zero frequency, and `state.opt_len` /
/// `state.static_len` have been updated with the cost of this tree.
fn build_tree<const N: usize>(state: &mut State, desc: &mut TreeDesc<N>) {
    let tree = &mut desc.dyn_tree;
    let stree = desc.stat_desc.static_tree;
    let elements = desc.stat_desc.elems;

    // Seed the heap with every symbol that has a non-zero frequency.
    let mut heap = Heap::new();
    let mut max_code = heap.initialize(&mut tree[..elements]);

    // The pkzip format requires that at least one distance code exists,
    // and that at least one bit should be sent even if there is only one
    // possible code. So to avoid special checks later on we force at least
    // two codes of non zero frequency.
    while heap.heap_len < 2 {
        heap.heap_len += 1;
        // Prefer inventing a new code (0 then 1); fall back to symbol 0.
        let node = if max_code < 2 {
            max_code += 1;
            max_code
        } else {
            0
        };

        debug_assert!(node >= 0);
        let node = node as usize;

        heap.heap[heap.heap_len] = node as u32;
        *tree[node].freq_mut() = 1;
        heap.depth[node] = 0;
        // The forced code costs nothing extra; compensate the block lengths.
        state.opt_len -= 1;
        if !stree.is_empty() {
            state.static_len -= stree[node].len() as usize;
        }
        /* node is 0 or 1 so it does not have extra bits */
    }

    debug_assert!(max_code >= 0);
    let max_code = max_code as usize;
    desc.max_code = max_code;

    // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
    // establish sub-heaps of increasing lengths:
    let mut n = heap.heap_len / 2;
    while n >= 1 {
        heap.pqdownheap(tree, n);
        n -= 1;
    }

    heap.construct_huffman_tree(tree, elements);

    // At this point, the fields freq and dad are set. We can now
    // generate the bit lengths.
    let bl_count = gen_bitlen(state, &mut heap, desc);

    // The field len is now set, we can generate the bit codes
    gen_codes(&mut desc.dyn_tree, max_code, &bl_count);
}
1959
/// Computes the optimal bit lengths for a tree and updates the total bit
/// lengths of the current block.
///
/// On entry, the `freq` and `dad` fields of the tree are set (by
/// `construct_huffman_tree`). On exit, the `len` field of each tree element is
/// the optimal bit length, clamped to `max_length` with the length counts
/// rebalanced if any code would have been too long. Returns the number of
/// codes at each bit length, which `gen_codes` needs.
fn gen_bitlen<const N: usize>(
    state: &mut State,
    heap: &mut Heap,
    desc: &mut TreeDesc<N>,
) -> [u16; MAX_BITS + 1] {
    let tree = &mut desc.dyn_tree;
    let max_code = desc.max_code;
    let stree = desc.stat_desc.static_tree;
    let extra = desc.stat_desc.extra_bits;
    let base = desc.stat_desc.extra_base;
    let max_length = desc.stat_desc.max_length;

    let mut bl_count = [0u16; MAX_BITS + 1];

    // In a first pass, compute the optimal bit lengths (which may
    // overflow in the case of the bit length tree).
    *tree[heap.heap[heap.heap_max] as usize].len_mut() = 0; /* root of the heap */

    // number of elements with bit length too large
    let mut overflow: i32 = 0;

    // Walk the internal nodes in order of decreasing frequency; each node's
    // depth is its parent's depth plus one.
    for h in heap.heap_max + 1..HEAP_SIZE {
        let n = heap.heap[h] as usize;
        let mut bits = tree[tree[n].dad() as usize].len() + 1;

        if bits > max_length {
            bits = max_length;
            overflow += 1;
        }

        // We overwrite tree[n].Dad which is no longer needed
        *tree[n].len_mut() = bits;

        // not a leaf node
        if n > max_code {
            continue;
        }

        bl_count[bits as usize] += 1;
        let mut xbits = 0;
        if n >= base {
            xbits = extra[n - base] as usize;
        }

        let f = tree[n].freq() as usize;
        state.opt_len += f * (bits as usize + xbits);

        if !stree.is_empty() {
            state.static_len += f * (stree[n].len() as usize + xbits);
        }
    }

    if overflow == 0 {
        return bl_count;
    }

    /* Find the first bit length which could increase: */
    loop {
        let mut bits = max_length as usize - 1;
        while bl_count[bits] == 0 {
            bits -= 1;
        }
        bl_count[bits] -= 1; /* move one leaf down the tree */
        bl_count[bits + 1] += 2; /* move one overflow item as its brother */
        bl_count[max_length as usize] -= 1;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;

        if overflow <= 0 {
            break;
        }
    }

    // Now recompute all bit lengths, scanning in increasing frequency.
    // h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
    // lengths instead of fixing only the wrong ones. This idea is taken
    // from 'ar' written by Haruhiko Okumura.)
    let mut h = HEAP_SIZE;
    for bits in (1..=max_length).rev() {
        let mut n = bl_count[bits as usize];
        while n != 0 {
            h -= 1;
            let m = heap.heap[h] as usize;
            // Internal node: deliberately does NOT decrement `n`, because only
            // leaves consume one of the `n` slots counted for this bit length
            // (matches the C original, where `continue` also skips `n--`).
            if m > max_code {
                continue;
            }

            if tree[m].len() != bits {
                // Tracev((stderr, "code %d bits %d->%u\n", m, tree[m].Len, bits));
                // NOTE(review): these products are computed in u16 before the
                // cast; presumably bits * freq always fits in 16 bits here —
                // verify against zlib C, which does this math in unsigned long.
                state.opt_len += (bits * tree[m].freq()) as usize;
                state.opt_len -= (tree[m].len() * tree[m].freq()) as usize;
                *tree[m].len_mut() = bits;
            }

            n -= 1;
        }
    }
    bl_count
}
2061
/// Returns `true` for a visible (printable, non-space) ASCII byte, i.e. the
/// range `0x21..=0x7E`, mirroring C's `isgraph`.
#[allow(unused)]
fn isgraph(c: u8) -> bool {
    matches!(c, 0x21..=0x7E)
}
2067
/// Generates the canonical Huffman codes for a tree whose bit lengths are
/// already set, storing each code bit-reversed in the `code` field (deflate
/// emits codes LSB-first).
fn gen_codes(tree: &mut [Value], max_code: usize, bl_count: &[u16]) {
    /* tree: the tree to decorate */
    /* max_code: largest code with non zero frequency */
    /* bl_count: number of codes at each bit length */
    let mut next_code = [0; MAX_BITS + 1]; /* next code value for each bit length */
    let mut code = 0; /* running code value */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for bits in 1..=MAX_BITS {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }

    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    assert!(
        code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
        "inconsistent bit counts"
    );

    trace!("\ngen_codes: max_code {max_code} ");

    for n in 0..=max_code {
        let len = tree[n].len();
        // Codes with zero frequency have length 0 and get no code.
        if len == 0 {
            continue;
        }

        /* Now reverse the bits */
        assert!((1..=15).contains(&len), "code length must be 1-15");
        *tree[n].code_mut() = next_code[len as usize].reverse_bits() >> (16 - len);
        next_code[len as usize] += 1;

        if tree != self::trees_tbl::STATIC_LTREE.as_slice() {
            trace!(
                "\nn {:>3} {} l {:>2} c {:>4x} ({:x}) ",
                n,
                if isgraph(n as u8) {
                    char::from_u32(n as u32).unwrap()
                } else {
                    ' '
                },
                len,
                tree[n].code(),
                next_code[len as usize] - 1
            );
        }
    }
}
2120
// The following three symbols index the bit length tree (see `scan_tree`);
// they encode runs of repeated code lengths.

/// repeat previous bit length 3-6 times (2 bits of repeat count)
const REP_3_6: usize = 16;

/// repeat a zero length 3-10 times  (3 bits of repeat count)
const REPZ_3_10: usize = 17;

/// repeat a zero length 11-138 times  (7 bits of repeat count)
const REPZ_11_138: usize = 18;
2129
/// Scans a literal or distance tree to determine the frequencies of the codes
/// in the bit length tree, accumulating them into `bl_desc.dyn_tree`.
///
/// Runs of equal code lengths are counted so that they can later be encoded
/// with the repeat codes `REP_3_6` / `REPZ_3_10` / `REPZ_11_138`.
fn scan_tree(bl_desc: &mut TreeDesc<{ 2 * BL_CODES + 1 }>, tree: &mut [Value], max_code: usize) {
    /* tree: the tree to be scanned */
    /* max_code: and its largest code of non zero frequency */
    let mut prevlen = -1isize; /* last emitted length */
    let mut curlen: isize; /* length of current code */
    let mut nextlen = tree[0].len(); /* length of next code */
    let mut count = 0; /* repeat count of the current code */
    let mut max_count = 7; /* max repeat count */
    let mut min_count = 4; /* min repeat count */

    // Runs of zero lengths can be much longer than runs of non-zero lengths.
    if nextlen == 0 {
        max_count = 138;
        min_count = 3;
    }

    // A length no run can match, so the final run is always flushed.
    *tree[max_code + 1].len_mut() = 0xffff; /* guard */

    let bl_tree = &mut bl_desc.dyn_tree;

    for n in 0..=max_code {
        curlen = nextlen as isize;
        nextlen = tree[n + 1].len();
        count += 1;
        if count < max_count && curlen == nextlen as isize {
            // Still inside a run; keep counting.
            continue;
        } else if count < min_count {
            // Run too short to use a repeat code: count each length directly.
            *bl_tree[curlen as usize].freq_mut() += count;
        } else if curlen != 0 {
            if curlen != prevlen {
                *bl_tree[curlen as usize].freq_mut() += 1;
            }
            *bl_tree[REP_3_6].freq_mut() += 1;
        } else if count <= 10 {
            *bl_tree[REPZ_3_10].freq_mut() += 1;
        } else {
            *bl_tree[REPZ_11_138].freq_mut() += 1;
        }

        count = 0;
        prevlen = curlen;

        // Choose the run limits for the next run based on its first length.
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        } else if curlen == nextlen as isize {
            max_count = 6;
            min_count = 3;
        } else {
            max_count = 7;
            min_count = 4;
        }
    }
}
2183
2184fn send_all_trees(state: &mut State, lcodes: usize, dcodes: usize, blcodes: usize) {
2185    assert!(
2186        lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
2187        "not enough codes"
2188    );
2189    assert!(
2190        lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2191        "too many codes"
2192    );
2193
2194    trace!("\nbl counts: ");
2195    state.bit_writer.send_bits(lcodes as u64 - 257, 5); /* not +255 as stated in appnote.txt */
2196    state.bit_writer.send_bits(dcodes as u64 - 1, 5);
2197    state.bit_writer.send_bits(blcodes as u64 - 4, 4); /* not -3 as stated in appnote.txt */
2198
2199    for rank in 0..blcodes {
2200        trace!("\nbl code {:>2} ", StaticTreeDesc::BL_ORDER[rank]);
2201        state.bit_writer.send_bits(
2202            state.bl_desc.dyn_tree[StaticTreeDesc::BL_ORDER[rank] as usize].len() as u64,
2203            3,
2204        );
2205    }
2206    trace!("\nbl tree: sent {}", state.bit_writer.bits_sent);
2207
2208    // literal tree
2209    state
2210        .bit_writer
2211        .send_tree(&state.l_desc.dyn_tree, &state.bl_desc.dyn_tree, lcodes - 1);
2212    trace!("\nlit tree: sent {}", state.bit_writer.bits_sent);
2213
2214    // distance tree
2215    state
2216        .bit_writer
2217        .send_tree(&state.d_desc.dyn_tree, &state.bl_desc.dyn_tree, dcodes - 1);
2218    trace!("\ndist tree: sent {}", state.bit_writer.bits_sent);
2219}
2220
/// Construct the Huffman tree for the bit lengths and return the index in
/// bl_order of the last bit length code to send.
fn build_bl_tree(state: &mut State) -> usize {
    /* Determine the bit length frequencies for literal and distance trees */

    scan_tree(
        &mut state.bl_desc,
        &mut state.l_desc.dyn_tree,
        state.l_desc.max_code,
    );

    scan_tree(
        &mut state.bl_desc,
        &mut state.d_desc.dyn_tree,
        state.d_desc.max_code,
    );

    /* Build the bit length tree: */
    {
        // bl_desc is swapped out of `state` so that `build_tree` can borrow
        // `state` mutably while also mutating the descriptor.
        let mut tmp = TreeDesc::EMPTY;
        core::mem::swap(&mut tmp, &mut state.bl_desc);
        build_tree(state, &mut tmp);
        core::mem::swap(&mut tmp, &mut state.bl_desc);
    }

    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    let mut max_blindex = BL_CODES - 1;
    while max_blindex >= 3 {
        let index = StaticTreeDesc::BL_ORDER[max_blindex] as usize;
        if state.bl_desc.dyn_tree[index].len() != 0 {
            break;
        }

        max_blindex -= 1;
    }

    /* Update opt_len to include the bit length tree and counts */
    state.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
    trace!(
        "\ndyn trees: dyn {}, stat {}",
        state.opt_len,
        state.static_len
    );

    max_blindex
}
2274
/// Determines the best encoding for the current block — stored, static trees,
/// or dynamic trees — and writes out the encoded block.
///
/// When `window_offset` is `Some`, the raw block data is still available in
/// the window, so a stored block is possible. `stored_len` is the raw length
/// of the block, and `last` marks the final block of the stream.
fn zng_tr_flush_block(
    stream: &mut DeflateStream,
    window_offset: Option<usize>,
    stored_len: u32,
    last: bool,
) {
    /* window_offset: offset of the input block into the window */
    /* stored_len: length of input block */
    /* last: one if this is the last block for a file */

    let mut opt_lenb;
    let static_lenb;
    let mut max_blindex = 0;

    let state = &mut stream.state;

    if state.sym_buf.is_empty() {
        // No symbols collected for this block.
        opt_lenb = 0;
        static_lenb = 0;
        state.static_len = 7;
    } else if state.level > 0 {
        // Compressing: build the dynamic trees and compare encodings.
        if stream.data_type == DataType::Unknown as i32 {
            stream.data_type = State::detect_data_type(&state.l_desc.dyn_tree) as i32;
        }

        {
            // Swap the descriptor out of `state` so build_tree can borrow
            // `state` mutably alongside it (same pattern as build_bl_tree).
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.l_desc);

            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.l_desc);

            trace!(
                "\nlit data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        {
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.d_desc);
            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.d_desc);

            trace!(
                "\ndist data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        // Build the bit length tree for the above two trees, and get the index
        // in bl_order of the last bit length code to send.
        max_blindex = build_bl_tree(state);

        // Determine the best encoding. Compute the block lengths in bytes.
        opt_lenb = (state.opt_len + 3 + 7) >> 3;
        static_lenb = (state.static_len + 3 + 7) >> 3;

        trace!(
            "\nopt {}({}) stat {}({}) stored {} lit {} ",
            opt_lenb,
            state.opt_len,
            static_lenb,
            state.static_len,
            stored_len,
            state.sym_buf.len() / 3
        );

        // Ties go to the static encoding (it decodes faster).
        if static_lenb <= opt_lenb || state.strategy == Strategy::Fixed {
            opt_lenb = static_lenb;
        }
    } else {
        assert!(window_offset.is_some(), "lost buf");
        /* force a stored block */
        opt_lenb = stored_len as usize + 5;
        static_lenb = stored_len as usize + 5;
    }

    if stored_len as usize + 4 <= opt_lenb && window_offset.is_some() {
        /* 4: two words for the lengths
         * The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        let window_offset = window_offset.unwrap();
        let range = window_offset..window_offset + stored_len as usize;
        zng_tr_stored_block(state, range, last);
    } else if static_lenb == opt_lenb {
        state.bit_writer.emit_tree(BlockType::StaticTrees, last);
        state.compress_block_static_trees();
    // cmpr_bits_add(s, s.static_len);
    } else {
        state.bit_writer.emit_tree(BlockType::DynamicTrees, last);
        send_all_trees(
            state,
            state.l_desc.max_code + 1,
            state.d_desc.max_code + 1,
            max_blindex + 1,
        );

        state.compress_block_dynamic_trees();
    }

    // TODO
    // This check is made mod 2^32, for files larger than 512 MB and unsigned long implemented on 32 bits.
    // assert_eq!(state.compressed_len, state.bits_sent, "bad compressed size");

    state.init_block();
    if last {
        state.bit_writer.emit_align();
    }

    // Tracev((stderr, "\ncomprlen {}(%lu) ", s->compressed_len>>3, s->compressed_len-7*last));
}
2393
2394pub(crate) fn flush_block_only(stream: &mut DeflateStream, is_last: bool) {
2395    zng_tr_flush_block(
2396        stream,
2397        (stream.state.block_start >= 0).then_some(stream.state.block_start as usize),
2398        (stream.state.strstart as isize - stream.state.block_start) as u32,
2399        is_last,
2400    );
2401
2402    stream.state.block_start = stream.state.strstart as isize;
2403    flush_pending(stream)
2404}
2405
/// Appends `bytes` (part of the gzip header) to the pending buffer, keeping
/// the header checksum in `stream.adler` up to date and flushing to the output
/// whenever the pending buffer fills up.
///
/// Returns `ControlFlow::Break(ReturnCode::Ok)` if the output buffer filled
/// before everything could be flushed; the caller should return so that
/// `deflate` can be called again with more output space. `state.gzindex`
/// tracks how far into the current header field we got.
#[must_use]
fn flush_bytes(stream: &mut DeflateStream, mut bytes: &[u8]) -> ControlFlow<ReturnCode> {
    let mut state = &mut stream.state;

    // we'll be using the pending buffer as temporary storage
    let mut beg = state.bit_writer.pending.pending().len(); /* start of bytes to update crc */

    while state.bit_writer.pending.remaining() < bytes.len() {
        // Fill the pending buffer to capacity with the next chunk.
        let copy = state.bit_writer.pending.remaining();

        state.bit_writer.pending.extend(&bytes[..copy]);

        // Fold only the newly added bytes into the header crc.
        stream.adler = crc32(
            stream.adler as u32,
            &state.bit_writer.pending.pending()[beg..],
        ) as z_checksum;

        state.gzindex += copy;
        flush_pending(stream);
        // Re-borrow: flush_pending needed the whole stream.
        state = &mut stream.state;

        // could not flush all the pending output
        if !state.bit_writer.pending.pending().is_empty() {
            state.last_flush = -1;
            return ControlFlow::Break(ReturnCode::Ok);
        }

        beg = 0;
        bytes = &bytes[copy..];
    }

    // The remainder fits in the pending buffer in one go.
    state.bit_writer.pending.extend(bytes);

    stream.adler = crc32(
        stream.adler as u32,
        &state.bit_writer.pending.pending()[beg..],
    ) as z_checksum;
    state.gzindex = 0;

    ControlFlow::Continue(())
}
2447
2448pub fn deflate(stream: &mut DeflateStream, flush: DeflateFlush) -> ReturnCode {
2449    if stream.next_out.is_null()
2450        || (stream.avail_in != 0 && stream.next_in.is_null())
2451        || (stream.state.status == Status::Finish && flush != DeflateFlush::Finish)
2452    {
2453        let err = ReturnCode::StreamError;
2454        stream.msg = err.error_message();
2455        return err;
2456    }
2457
2458    if stream.avail_out == 0 {
2459        let err = ReturnCode::BufError;
2460        stream.msg = err.error_message();
2461        return err;
2462    }
2463
2464    let old_flush = stream.state.last_flush;
2465    stream.state.last_flush = flush as i8;
2466
2467    /* Flush as much pending output as possible */
2468    if !stream.state.bit_writer.pending.pending().is_empty() {
2469        flush_pending(stream);
2470        if stream.avail_out == 0 {
2471            /* Since avail_out is 0, deflate will be called again with
2472             * more output space, but possibly with both pending and
2473             * avail_in equal to zero. There won't be anything to do,
2474             * but this is not an error situation so make sure we
2475             * return OK instead of BUF_ERROR at next call of deflate:
2476             */
2477            stream.state.last_flush = -1;
2478            return ReturnCode::Ok;
2479        }
2480
2481        /* Make sure there is something to do and avoid duplicate consecutive
2482         * flushes. For repeated and useless calls with Z_FINISH, we keep
2483         * returning Z_STREAM_END instead of Z_BUF_ERROR.
2484         */
2485    } else if stream.avail_in == 0
2486        && rank_flush(flush as i8) <= rank_flush(old_flush)
2487        && flush != DeflateFlush::Finish
2488    {
2489        let err = ReturnCode::BufError;
2490        stream.msg = err.error_message();
2491        return err;
2492    }
2493
2494    /* User must not provide more input after the first FINISH: */
2495    if stream.state.status == Status::Finish && stream.avail_in != 0 {
2496        let err = ReturnCode::BufError;
2497        stream.msg = err.error_message();
2498        return err;
2499    }
2500
2501    /* Write the header */
2502    if stream.state.status == Status::Init && stream.state.wrap == 0 {
2503        stream.state.status = Status::Busy;
2504    }
2505
2506    if stream.state.status == Status::Init {
2507        let header = stream.state.header();
2508        stream
2509            .state
2510            .bit_writer
2511            .pending
2512            .extend(&header.to_be_bytes());
2513
2514        /* Save the adler32 of the preset dictionary: */
2515        if stream.state.strstart != 0 {
2516            let adler = stream.adler as u32;
2517            stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
2518        }
2519
2520        stream.adler = ADLER32_INITIAL_VALUE as _;
2521        stream.state.status = Status::Busy;
2522
2523        // compression must start with an empty pending buffer
2524        flush_pending(stream);
2525
2526        if !stream.state.bit_writer.pending.pending().is_empty() {
2527            stream.state.last_flush = -1;
2528
2529            return ReturnCode::Ok;
2530        }
2531    }
2532
2533    if stream.state.status == Status::GZip {
2534        /* gzip header */
2535        stream.state.crc_fold = Crc32Fold::new();
2536
2537        stream.state.bit_writer.pending.extend(&[31, 139, 8]);
2538
2539        let extra_flags = if stream.state.level == 9 {
2540            2
2541        } else if stream.state.strategy >= Strategy::HuffmanOnly || stream.state.level < 2 {
2542            4
2543        } else {
2544            0
2545        };
2546
2547        match &stream.state.gzhead {
2548            None => {
2549                let bytes = [0, 0, 0, 0, 0, extra_flags, gz_header::OS_CODE];
2550                stream.state.bit_writer.pending.extend(&bytes);
2551                stream.state.status = Status::Busy;
2552
2553                /* Compression must start with an empty pending buffer */
2554                flush_pending(stream);
2555                if !stream.state.bit_writer.pending.pending().is_empty() {
2556                    stream.state.last_flush = -1;
2557                    return ReturnCode::Ok;
2558                }
2559            }
2560            Some(gzhead) => {
2561                stream.state.bit_writer.pending.extend(&[gzhead.flags()]);
2562                let bytes = (gzhead.time as u32).to_le_bytes();
2563                stream.state.bit_writer.pending.extend(&bytes);
2564                stream
2565                    .state
2566                    .bit_writer
2567                    .pending
2568                    .extend(&[extra_flags, gzhead.os as u8]);
2569
2570                if !gzhead.extra.is_null() {
2571                    let bytes = (gzhead.extra_len as u16).to_le_bytes();
2572                    stream.state.bit_writer.pending.extend(&bytes);
2573                }
2574
2575                if gzhead.hcrc > 0 {
2576                    stream.adler = crc32(
2577                        stream.adler as u32,
2578                        stream.state.bit_writer.pending.pending(),
2579                    ) as z_checksum
2580                }
2581
2582                stream.state.gzindex = 0;
2583                stream.state.status = Status::Extra;
2584            }
2585        }
2586    }
2587
2588    if stream.state.status == Status::Extra {
2589        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2590            if !gzhead.extra.is_null() {
2591                let gzhead_extra = gzhead.extra;
2592
2593                let extra = unsafe {
2594                    core::slice::from_raw_parts(
2595                        // SAFETY: gzindex is always less than extra_len, and the user
2596                        // guarantees the pointer is valid for extra_len.
2597                        gzhead_extra.add(stream.state.gzindex),
2598                        (gzhead.extra_len & 0xffff) as usize - stream.state.gzindex,
2599                    )
2600                };
2601
2602                if let ControlFlow::Break(err) = flush_bytes(stream, extra) {
2603                    return err;
2604                }
2605            }
2606        }
2607        stream.state.status = Status::Name;
2608    }
2609
2610    if stream.state.status == Status::Name {
2611        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2612            if !gzhead.name.is_null() {
2613                // SAFETY: user satisfies precondition that gzhead.name is a C string.
2614                let gzhead_name = unsafe { CStr::from_ptr(gzhead.name.cast()) };
2615                let bytes = gzhead_name.to_bytes_with_nul();
2616                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
2617                    return err;
2618                }
2619            }
2620            stream.state.status = Status::Comment;
2621        }
2622    }
2623
2624    if stream.state.status == Status::Comment {
2625        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2626            if !gzhead.comment.is_null() {
2627                // SAFETY: user satisfies precondition that gzhead.name is a C string.
2628                let gzhead_comment = unsafe { CStr::from_ptr(gzhead.comment.cast()) };
2629                let bytes = gzhead_comment.to_bytes_with_nul();
2630                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
2631                    return err;
2632                }
2633            }
2634            stream.state.status = Status::Hcrc;
2635        }
2636    }
2637
2638    if stream.state.status == Status::Hcrc {
2639        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2640            if gzhead.hcrc != 0 {
2641                let bytes = (stream.adler as u16).to_le_bytes();
2642                if let ControlFlow::Break(err) = flush_bytes(stream, &bytes) {
2643                    return err;
2644                }
2645            }
2646        }
2647
2648        stream.state.status = Status::Busy;
2649
2650        // compression must start with an empty pending buffer
2651        flush_pending(stream);
2652        if !stream.state.bit_writer.pending.pending().is_empty() {
2653            stream.state.last_flush = -1;
2654            return ReturnCode::Ok;
2655        }
2656    }
2657
2658    // Start a new block or continue the current one.
2659    let state = &mut stream.state;
2660    if stream.avail_in != 0
2661        || state.lookahead != 0
2662        || (flush != DeflateFlush::NoFlush && state.status != Status::Finish)
2663    {
2664        let bstate = self::algorithm::run(stream, flush);
2665
2666        let state = &mut stream.state;
2667
2668        if matches!(bstate, BlockState::FinishStarted | BlockState::FinishDone) {
2669            state.status = Status::Finish;
2670        }
2671
2672        match bstate {
2673            BlockState::NeedMore | BlockState::FinishStarted => {
2674                if stream.avail_out == 0 {
2675                    state.last_flush = -1; /* avoid BUF_ERROR next call, see above */
2676                }
2677                return ReturnCode::Ok;
2678                /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
2679                 * of deflate should use the same flush parameter to make sure
2680                 * that the flush is complete. So we don't have to output an
2681                 * empty block here, this will be done at next call. This also
2682                 * ensures that for a very small output buffer, we emit at most
2683                 * one empty block.
2684                 */
2685            }
2686            BlockState::BlockDone => {
2687                match flush {
2688                    DeflateFlush::NoFlush => unreachable!("condition of inner surrounding if"),
2689                    DeflateFlush::PartialFlush => {
2690                        state.bit_writer.align();
2691                    }
2692                    DeflateFlush::SyncFlush => {
2693                        // add an empty stored block that is marked as not final. This is useful for
2694                        // parallel deflate where we want to make sure the intermediate blocks are not
2695                        // marked as "last block".
2696                        zng_tr_stored_block(state, 0..0, false);
2697                    }
2698                    DeflateFlush::FullFlush => {
2699                        // add an empty stored block that is marked as not final. This is useful for
2700                        // parallel deflate where we want to make sure the intermediate blocks are not
2701                        // marked as "last block".
2702                        zng_tr_stored_block(state, 0..0, false);
2703
2704                        state.head.as_mut_slice().fill(0); // forget history
2705
2706                        if state.lookahead == 0 {
2707                            state.strstart = 0;
2708                            state.block_start = 0;
2709                            state.insert = 0;
2710                        }
2711                    }
2712                    DeflateFlush::Block => { /* fall through */ }
2713                    DeflateFlush::Finish => unreachable!("condition of outer surrounding if"),
2714                }
2715
2716                flush_pending(stream);
2717
2718                if stream.avail_out == 0 {
2719                    stream.state.last_flush = -1; /* avoid BUF_ERROR at next call, see above */
2720                    return ReturnCode::Ok;
2721                }
2722            }
2723            BlockState::FinishDone => { /* do nothing */ }
2724        }
2725    }
2726
2727    if flush != DeflateFlush::Finish {
2728        return ReturnCode::Ok;
2729    }
2730
2731    // write the trailer
2732    if stream.state.wrap == 2 {
2733        let crc_fold = core::mem::take(&mut stream.state.crc_fold);
2734        stream.adler = crc_fold.finish() as z_checksum;
2735
2736        let adler = stream.adler as u32;
2737        stream.state.bit_writer.pending.extend(&adler.to_le_bytes());
2738
2739        let total_in = stream.total_in as u32;
2740        stream
2741            .state
2742            .bit_writer
2743            .pending
2744            .extend(&total_in.to_le_bytes());
2745    } else if stream.state.wrap == 1 {
2746        let adler = stream.adler as u32;
2747        stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
2748    }
2749
2750    flush_pending(stream);
2751
2752    // If avail_out is zero, the application will call deflate again to flush the rest.
2753    if stream.state.wrap > 0 {
2754        stream.state.wrap = -stream.state.wrap; /* write the trailer only once! */
2755    }
2756
2757    if stream.state.bit_writer.pending.pending().is_empty() {
2758        assert_eq!(stream.state.bit_writer.bits_used, 0, "bi_buf not flushed");
2759        return ReturnCode::StreamEnd;
2760    }
2761    ReturnCode::Ok
2762}
2763
2764pub(crate) fn flush_pending(stream: &mut DeflateStream) {
2765    let state = &mut stream.state;
2766
2767    state.bit_writer.flush_bits();
2768
2769    let pending = state.bit_writer.pending.pending();
2770    let len = Ord::min(pending.len(), stream.avail_out as usize);
2771
2772    if len == 0 {
2773        return;
2774    }
2775
2776    trace!("\n[FLUSH {len} bytes]");
2777    // SAFETY: len is min(pending, stream.avail_out), so we won't overrun next_out.
2778    unsafe { core::ptr::copy_nonoverlapping(pending.as_ptr(), stream.next_out, len) };
2779
2780    stream.next_out = stream.next_out.wrapping_add(len);
2781    stream.total_out += len as crate::c_api::z_size;
2782    stream.avail_out -= len as crate::c_api::uInt;
2783
2784    state.bit_writer.pending.advance(len);
2785}
2786
2787pub fn compress_slice<'a>(
2788    output: &'a mut [u8],
2789    input: &[u8],
2790    config: DeflateConfig,
2791) -> (&'a mut [u8], ReturnCode) {
2792    // SAFETY: a [u8] is a valid [MaybeUninit<u8>].
2793    let output_uninit = unsafe {
2794        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2795    };
2796
2797    compress(output_uninit, input, config)
2798}
2799
/// Compress `input` into the (possibly uninitialized) `output` buffer in one
/// call, finishing the stream: equivalent to [`compress_with_flush`] with
/// [`DeflateFlush::Finish`].
///
/// Returns the initialized prefix of `output` holding the compressed bytes and
/// the final [`ReturnCode`].
pub fn compress<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
) -> (&'a mut [u8], ReturnCode) {
    compress_with_flush(output, input, config, DeflateFlush::Finish)
}
2807
2808pub fn compress_slice_with_flush<'a>(
2809    output: &'a mut [u8],
2810    input: &[u8],
2811    config: DeflateConfig,
2812    flush: DeflateFlush,
2813) -> (&'a mut [u8], ReturnCode) {
2814    // SAFETY: a [u8] is a valid [MaybeUninit<u8>], and `compress_with_flush` never uninitializes previously initialized memory.
2815    let output_uninit = unsafe {
2816        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2817    };
2818
2819    compress_with_flush(output_uninit, input, config, flush)
2820}
2821
/// Compress all of `input` into `output`, applying `final_flush` once the whole
/// input has been handed to the deflate state machine.
///
/// Returns the initialized prefix of `output` holding the compressed bytes and
/// the final [`ReturnCode`] (`Ok` on success; the error code otherwise).
pub fn compress_with_flush<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
    final_flush: DeflateFlush,
) -> (&'a mut [u8], ReturnCode) {
    let mut stream = z_stream {
        next_in: input.as_ptr() as *mut u8,
        avail_in: 0, // zero so the first loop iteration refills it below
        total_in: 0,
        next_out: output.as_mut_ptr() as *mut u8,
        avail_out: 0, // zero so the first loop iteration refills it below
        total_out: 0,
        msg: core::ptr::null_mut(),
        state: core::ptr::null_mut(),
        zalloc: None,
        zfree: None,
        opaque: core::ptr::null_mut(),
        data_type: 0,
        adler: 0,
        reserved: 0,
    };

    let err = init(&mut stream, config);
    if err != ReturnCode::Ok {
        return (&mut [], err);
    }

    // avail_in/avail_out are C `uInt` (32-bit), so slices longer than u32::MAX
    // must be fed to deflate in chunks.
    let max = core::ffi::c_uint::MAX as usize;

    let mut left = output.len();
    let mut source_len = input.len();

    loop {
        // Refill the output window from the remaining output space.
        if stream.avail_out == 0 {
            stream.avail_out = Ord::min(left, max) as _;
            left -= stream.avail_out as usize;
        }

        // Refill the input window from the remaining input.
        if stream.avail_in == 0 {
            stream.avail_in = Ord::min(source_len, max) as _;
            source_len -= stream.avail_in as usize;
        }

        // Only apply the caller's flush mode once all input has been handed over.
        let flush = if source_len > 0 {
            DeflateFlush::NoFlush
        } else {
            final_flush
        };

        let err = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
            deflate(stream, flush)
        } else {
            ReturnCode::StreamError
        };

        // Both StreamEnd (success) and any error terminate the loop.
        if err != ReturnCode::Ok {
            break;
        }
    }

    // SAFETY: we have now initialized these bytes
    let output_slice = unsafe {
        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, stream.total_out as usize)
    };

    // may DataError if insufficient output space
    let return_code = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
        match end(stream) {
            Ok(_) => ReturnCode::Ok,
            Err(_) => ReturnCode::DataError,
        }
    } else {
        ReturnCode::Ok
    };

    (output_slice, return_code)
}
2900
/// Upper bound on the number of bytes that [`compress`] can produce for an
/// input of `source_len` bytes, assuming the default zlib wrapper.
pub const fn compress_bound(source_len: usize) -> usize {
    compress_bound_help(source_len, ZLIB_WRAPLEN)
}
2904
2905const fn compress_bound_help(source_len: usize, wrap_len: usize) -> usize {
2906    source_len // The source size itself */
2907        // Always at least one byte for any input
2908        .wrapping_add(if source_len == 0 { 1 } else { 0 })
2909        // One extra byte for lengths less than 9
2910        .wrapping_add(if source_len < 9 { 1 } else { 0 })
2911        // Source encoding overhead, padded to next full byte
2912        .wrapping_add(deflate_quick_overhead(source_len))
2913        // Deflate block overhead bytes
2914        .wrapping_add(DEFLATE_BLOCK_OVERHEAD)
2915        // none, zlib or gzip wrapper
2916        .wrapping_add(wrap_len)
2917}
2918
///  heap used to build the Huffman trees
///
/// The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
/// The same heap array is used to build all trees.
#[derive(Clone)]
struct Heap {
    // Binary min-heap of tree node indices; the low (1..=heap_len) part is the
    // live heap, the high part (heap_max..) stores removed nodes.
    heap: [u32; 2 * L_CODES + 1],

    /// number of elements in the heap
    heap_len: usize,

    /// element of the largest frequency
    heap_max: usize,

    // depth[n] is the depth of node n in the tree under construction; used as a
    // tie-breaker when two nodes have equal frequency (see `pqdownheap`).
    depth: [u8; 2 * L_CODES + 1],
}
2935
impl Heap {
    // an empty heap
    fn new() -> Self {
        Self {
            heap: [0; 2 * L_CODES + 1],
            heap_len: 0,
            heap_max: 0,
            depth: [0; 2 * L_CODES + 1],
        }
    }

    /// Construct the initial heap, with least frequent element in
    /// heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
    ///
    /// Returns the largest code with a non-zero frequency, or -1 if every
    /// frequency is zero.
    fn initialize(&mut self, tree: &mut [Value]) -> isize {
        let mut max_code = -1;

        self.heap_len = 0;
        self.heap_max = HEAP_SIZE;

        for (n, node) in tree.iter_mut().enumerate() {
            if node.freq() > 0 {
                self.heap_len += 1;
                self.heap[self.heap_len] = n as u32;
                max_code = n as isize;
                self.depth[n] = 0;
            } else {
                // unused code: clear its bit length
                *node.len_mut() = 0;
            }
        }

        max_code
    }

    /// Index within the heap array of least frequent node in the Huffman tree
    const SMALLEST: usize = 1;

    /// Restore the heap invariant by sifting the element at index `k` down.
    /// Nodes are ordered primarily by frequency, with ties broken by depth.
    fn pqdownheap(&mut self, tree: &[Value], mut k: usize) {
        /* tree: the tree to restore */
        /* k: node to move down */

        // Given the index $i of a node in the tree, pack the node's frequency and depth
        // into a single integer. The heap ordering logic uses a primary sort on frequency
        // and a secondary sort on depth, so packing both into one integer makes it
        // possible to sort with fewer comparison operations.
        macro_rules! freq_and_depth {
            ($i:expr) => {
                (tree[$i as usize].freq() as u32) << 8 | self.depth[$i as usize] as u32
            };
        }

        let v = self.heap[k];
        let v_val = freq_and_depth!(v);
        let mut j = k << 1; /* left son of k */

        while j <= self.heap_len {
            /* Set j to the smallest of the two sons: */
            let mut j_val = freq_and_depth!(self.heap[j]);
            if j < self.heap_len {
                let j1_val = freq_and_depth!(self.heap[j + 1]);
                if j1_val <= j_val {
                    j += 1;
                    j_val = j1_val;
                }
            }

            /* Exit if v is smaller than both sons */
            if v_val <= j_val {
                break;
            }

            /* Exchange v with the smallest son */
            self.heap[k] = self.heap[j];
            k = j;

            /* And continue down the tree, setting j to the left son of k */
            j <<= 1;
        }

        self.heap[k] = v;
    }

    /// Remove the smallest element from the heap and recreate the heap with
    /// one less element. Updates heap and heap_len.
    fn pqremove(&mut self, tree: &[Value]) -> u32 {
        let top = self.heap[Self::SMALLEST];
        self.heap[Self::SMALLEST] = self.heap[self.heap_len];
        self.heap_len -= 1;

        self.pqdownheap(tree, Self::SMALLEST);

        top
    }

    /// Construct the Huffman tree by repeatedly combining the least two frequent nodes.
    ///
    /// `node` is the tree index at which newly created internal nodes are
    /// stored; it is incremented for each node created. Removed nodes are
    /// parked, sorted by frequency, in the upper part of the heap array
    /// (below `heap_max`).
    fn construct_huffman_tree(&mut self, tree: &mut [Value], mut node: usize) {
        loop {
            let n = self.pqremove(tree) as usize; /* n = node of least frequency */
            let m = self.heap[Heap::SMALLEST] as usize; /* m = node of next least frequency */

            self.heap_max -= 1;
            self.heap[self.heap_max] = n as u32; /* keep the nodes sorted by frequency */
            self.heap_max -= 1;
            self.heap[self.heap_max] = m as u32;

            /* Create a new node father of n and m */
            *tree[node].freq_mut() = tree[n].freq() + tree[m].freq();
            self.depth[node] = Ord::max(self.depth[n], self.depth[m]) + 1;

            *tree[n].dad_mut() = node as u16;
            *tree[m].dad_mut() = node as u16;

            /* and insert the new node in the heap */
            self.heap[Heap::SMALLEST] = node as u32;
            node += 1;

            self.pqdownheap(tree, Heap::SMALLEST);

            if self.heap_len < 2 {
                break;
            }
        }

        // Park the final remaining node in the sorted region as well.
        self.heap_max -= 1;
        self.heap[self.heap_max] = self.heap[Heap::SMALLEST];
    }
}
3062
3063/// # Safety
3064///
3065/// The caller must guarantee:
3066///
3067/// * If `head` is `Some`
3068///     - `head.extra` is `NULL` or is readable for at least `head.extra_len` bytes
3069///     - `head.name` is `NULL` or satisfies the requirements of [`core::ffi::CStr::from_ptr`]
3070///     - `head.comment` is `NULL` or satisfies the requirements of [`core::ffi::CStr::from_ptr`]
3071pub unsafe fn set_header<'a>(
3072    stream: &mut DeflateStream<'a>,
3073    head: Option<&'a mut gz_header>,
3074) -> ReturnCode {
3075    if stream.state.wrap != 2 {
3076        ReturnCode::StreamError as _
3077    } else {
3078        stream.state.gzhead = head;
3079        ReturnCode::Ok as _
3080    }
3081}
3082
// zlib format overhead (bytes added around the raw deflate stream)
const ZLIB_WRAPLEN: usize = 6;
// gzip format overhead (bytes added around the raw deflate stream)
const GZIP_WRAPLEN: usize = 18;

// Bit costs of a deflate block: block header, end-of-block marker, and padding
// to the next byte boundary; summed and converted to whole bytes below.
const DEFLATE_HEADER_BITS: usize = 3;
const DEFLATE_EOBS_BITS: usize = 15;
const DEFLATE_PAD_BITS: usize = 6;
const DEFLATE_BLOCK_OVERHEAD: usize =
    (DEFLATE_HEADER_BITS + DEFLATE_EOBS_BITS + DEFLATE_PAD_BITS) >> 3;
3093
/// Maximum bits per literal used by the "quick" encoding: one bit more than a
/// plain byte.
const DEFLATE_QUICK_LIT_MAX_BITS: usize = 9;

/// Worst-case overhead in bytes when every input byte is emitted as a
/// 9-bit literal: one extra bit per byte, rounded up to a whole byte.
const fn deflate_quick_overhead(x: usize) -> usize {
    let bits = x
        .wrapping_mul(DEFLATE_QUICK_LIT_MAX_BITS - 8)
        .wrapping_add(7);

    // imitate zlib-ng rounding behavior (on windows, c_ulong is 32 bits)
    (bits as core::ffi::c_ulong >> 3) as usize
}
3103
/// For the default windowBits of 15 and memLevel of 8, this function returns
/// a close to exact, as well as small, upper bound on the compressed size.
/// They are coded as constants here for a reason--if the #define's are
/// changed, then this function needs to be changed as well.  The return
/// value for 15 and 8 only works for those exact settings.
///
/// For any setting other than those defaults for windowBits and memLevel,
/// the value returned is a conservative worst case for the maximum expansion
/// resulting from using fixed blocks instead of stored blocks, which deflate
/// can emit on compressed data for some combinations of the parameters.
///
/// This function could be more sophisticated to provide closer upper bounds for
/// every combination of windowBits and memLevel.  But even the conservative
/// upper bound of about 14% expansion does not seem onerous for output buffer
/// allocation.
pub fn bound(stream: Option<&mut DeflateStream>, source_len: usize) -> usize {
    // on windows, c_ulong is only a 32-bit integer
    let mask = core::ffi::c_ulong::MAX as usize;

    // conservative upper bound for compressed data
    let comp_len = source_len
        .wrapping_add((source_len.wrapping_add(7) & mask) >> 3)
        .wrapping_add((source_len.wrapping_add(63) & mask) >> 6)
        .wrapping_add(5);

    // Without a stream we cannot know the wrapper kind; assume zlib.
    let Some(stream) = stream else {
        // return conservative bound plus zlib wrapper
        return comp_len.wrapping_add(6);
    };

    /* compute wrapper length */
    let wrap_len = match stream.state.wrap {
        0 => {
            // raw deflate
            0
        }
        1 => {
            // zlib wrapper; the extra 4 bytes when strstart != 0 presumably
            // account for the DICTID field — NOTE(review): confirm.
            if stream.state.strstart != 0 {
                ZLIB_WRAPLEN + 4
            } else {
                ZLIB_WRAPLEN
            }
        }
        2 => {
            // gzip wrapper: base size plus any optional header fields
            let mut gz_wrap_len = GZIP_WRAPLEN;

            if let Some(header) = &stream.state.gzhead {
                if !header.extra.is_null() {
                    // 2-byte length prefix plus the extra bytes themselves
                    gz_wrap_len += 2 + header.extra_len as usize;
                }

                // Count the name bytes including the terminating NUL.
                let mut c_string = header.name;
                if !c_string.is_null() {
                    loop {
                        gz_wrap_len += 1;
                        // SAFETY: user guarantees header.name is a valid C string.
                        unsafe {
                            if *c_string == 0 {
                                break;
                            }
                            c_string = c_string.add(1);
                        }
                    }
                }

                // Count the comment bytes including the terminating NUL.
                let mut c_string = header.comment;
                if !c_string.is_null() {
                    loop {
                        gz_wrap_len += 1;
                        // SAFETY: user guarantees header.comment is a valid C string.
                        unsafe {
                            if *c_string == 0 {
                                break;
                            }
                            c_string = c_string.add(1);
                        }
                    }
                }

                // optional 2-byte header CRC
                if header.hcrc != 0 {
                    gz_wrap_len += 2;
                }
            }

            gz_wrap_len
        }
        _ => {
            // default
            ZLIB_WRAPLEN
        }
    };

    // Non-default window/hash settings fall back to conservative bounds; the
    // tight `compress_bound_help` estimate only holds for the defaults.
    if stream.state.w_bits() != MAX_WBITS as u32 || HASH_BITS < 15 {
        if stream.state.level == 0 {
            /* upper bound for stored blocks with length 127 (memLevel == 1) ~4% overhead plus a small constant */
            source_len
                .wrapping_add(source_len >> 5)
                .wrapping_add(source_len >> 7)
                .wrapping_add(source_len >> 11)
                .wrapping_add(7)
                .wrapping_add(wrap_len)
        } else {
            comp_len.wrapping_add(wrap_len)
        }
    } else {
        compress_bound_help(source_len, wrap_len)
    }
}
3214
3215#[cfg(test)]
3216mod test {
3217    use crate::{
3218        inflate::{uncompress_slice, InflateConfig, InflateStream},
3219        InflateFlush,
3220    };
3221
3222    use super::*;
3223
3224    use core::{ffi::CStr, sync::atomic::AtomicUsize};
3225
    // detect_data_type classifies a literal-frequency table as Text or Binary.
    #[test]
    fn detect_data_type_basic() {
        // a frequency table in which no byte value was observed
        let empty = || [Value::new(0, 0); LITERALS];

        assert_eq!(State::detect_data_type(&empty()), DataType::Binary);

        // a NUL byte (index 0) classifies as binary
        let mut binary = empty();
        binary[0] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&binary), DataType::Binary);

        // carriage return counts as text
        let mut text = empty();
        text[b'\r' as usize] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&text), DataType::Text);

        // a printable letter counts as text
        let mut text = empty();
        text[b'a' as usize] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&text), DataType::Text);

        // byte 7 (BEL) classifies as binary
        let mut non_text = empty();
        non_text[7] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&non_text), DataType::Binary);
    }
3248
    // from_stream_mut must reject null pointers and streams whose internal
    // state has not been initialized, and accept an init'ed stream.
    #[test]
    fn from_stream_mut() {
        unsafe {
            assert!(DeflateStream::from_stream_mut(core::ptr::null_mut()).is_none());

            let mut stream = z_stream::default();
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            // state is still NULL
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            // after init, conversion must succeed
            init(&mut stream, DeflateConfig::default());
            let stream = DeflateStream::from_stream_mut(&mut stream);
            assert!(stream.is_some());

            assert!(end(stream.unwrap()).is_ok());
        }
    }
3267
    /// Allocation callback that returns NULL on the `N`th call and delegates
    /// all other calls to the C allocator. `opaque` must point to an
    /// `AtomicUsize` used as the call counter.
    unsafe extern "C" fn fail_nth_allocation<const N: usize>(
        opaque: crate::c_api::voidpf,
        items: crate::c_api::uInt,
        size: crate::c_api::uInt,
    ) -> crate::c_api::voidpf {
        let count = unsafe { &*(opaque as *const AtomicUsize) };

        if count.fetch_add(1, core::sync::atomic::Ordering::Relaxed) != N {
            // must use the C allocator internally because (de)allocation is based on function
            // pointer values and because we don't use the rust allocator directly, the allocation
            // logic will store the pointer to the start at the start of the allocation.
            unsafe { (crate::allocate::Allocator::C.zalloc)(opaque, items, size) }
        } else {
            core::ptr::null_mut()
        }
    }
3284
    // `init` must report MemError (and not crash or leak) no matter which of
    // its internal allocations fails.
    #[test]
    fn init_invalid_allocator() {
        // the very first allocation fails
        {
            let atomic = AtomicUsize::new(0);
            let mut stream = z_stream {
                zalloc: Some(fail_nth_allocation::<0>),
                zfree: Some(crate::allocate::Allocator::C.zfree),
                opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
                ..z_stream::default()
            };
            assert_eq!(
                init(&mut stream, DeflateConfig::default()),
                ReturnCode::MemError
            );
        }

        // an allocation in the middle fails
        {
            let atomic = AtomicUsize::new(0);
            let mut stream = z_stream {
                zalloc: Some(fail_nth_allocation::<3>),
                zfree: Some(crate::allocate::Allocator::C.zfree),
                opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
                ..z_stream::default()
            };
            assert_eq!(
                init(&mut stream, DeflateConfig::default()),
                ReturnCode::MemError
            );
        }

        // the last allocation fails
        {
            let atomic = AtomicUsize::new(0);
            let mut stream = z_stream {
                zalloc: Some(fail_nth_allocation::<5>),
                zfree: Some(crate::allocate::Allocator::C.zfree),
                opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
                ..z_stream::default()
            };
            assert_eq!(
                init(&mut stream, DeflateConfig::default()),
                ReturnCode::MemError
            );
        }
    }
3329
    // `copy` must report MemError (and leave the source stream usable) no
    // matter which of its internal allocations fails. `init` itself performs
    // 6 allocations, so the failing index is offset by 6.
    mod copy_invalid_allocator {
        use super::*;

        // the first allocation made by `copy` fails
        #[test]
        fn fail_0() {
            let mut stream = z_stream::default();

            let atomic = AtomicUsize::new(0);
            stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
            stream.zalloc = Some(fail_nth_allocation::<6>);
            stream.zfree = Some(crate::allocate::Allocator::C.zfree);

            // init performs 6 allocations; we don't want those to fail
            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

            let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!()
            };

            let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();

            assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);

            // the original stream must still end cleanly
            assert!(end(stream).is_ok());
        }

        // an allocation in the middle of `copy` fails
        #[test]
        fn fail_3() {
            let mut stream = z_stream::default();

            let atomic = AtomicUsize::new(0);
            stream.zalloc = Some(fail_nth_allocation::<{ 6 + 3 }>);
            stream.zfree = Some(crate::allocate::Allocator::C.zfree);
            stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;

            // init performs 6 allocations; we don't want those to fail
            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

            let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!()
            };

            let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();

            assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);

            assert!(end(stream).is_ok());
        }

        // the last allocation made by `copy` fails
        #[test]
        fn fail_5() {
            let mut stream = z_stream::default();

            let atomic = AtomicUsize::new(0);
            stream.zalloc = Some(fail_nth_allocation::<{ 6 + 5 }>);
            stream.zfree = Some(crate::allocate::Allocator::C.zfree);
            stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;

            // init performs 6 allocations; we don't want those to fail
            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

            let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!()
            };

            let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();

            assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);

            assert!(end(stream).is_ok());
        }
    }
3402
    // Validation and normalization of DeflateConfig values passed to `init`.
    mod invalid_deflate_config {
        use super::*;

        // a default config must initialize successfully at compression level 6
        #[test]
        fn sanity_check() {
            let mut stream = z_stream::default();
            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

            // init must install the default allocator callbacks
            assert!(stream.zalloc.is_some());
            assert!(stream.zfree.is_some());

            // this should be the default level
            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
            assert_eq!(stream.state.level, 6);

            assert!(end(stream).is_ok());
        }

        #[test]
        fn window_bits_correction() {
            // window_bits of 8 gets turned into 9 internally
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: 8,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::Ok);
            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
            assert_eq!(stream.state.w_bits(), 9);

            assert!(end(stream).is_ok());
        }

        #[test]
        fn window_bits_too_low() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: -16,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
        }

        #[test]
        fn window_bits_too_high() {
            // window bits too high
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: 42,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
        }
    }
3457
    // `end` must report an error when the stream is dropped while compressed
    // data is still pending because the output buffer was too small.
    #[test]
    fn end_data_error() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        // next deflate into too little space
        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;
        let output = &mut [0, 0, 0];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        // the deflate is fine
        assert_eq!(deflate(stream, DeflateFlush::NoFlush), ReturnCode::Ok);

        // but end is not
        assert!(end(stream).is_err());
    }
3478
    // After `reset_keep`, the same stream must be able to run a second full
    // compression of the remaining input.
    #[test]
    fn test_reset_keep() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        // compress the input fully into an amply sized buffer
        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;

        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert_eq!(reset_keep(stream), ReturnCode::Ok);

        // a second Finish after the reset must also run to StreamEnd
        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert!(end(stream).is_ok());
    }
3504
3505    #[test]
3506    fn hello_world_huffman_only() {
3507        const EXPECTED: &[u8] = &[
3508            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
3509            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
3510        ];
3511
3512        let input = "Hello World!\n";
3513
3514        let mut output = vec![0; 128];
3515
3516        let config = DeflateConfig {
3517            level: 6,
3518            method: Method::Deflated,
3519            window_bits: crate::MAX_WBITS,
3520            mem_level: DEF_MEM_LEVEL,
3521            strategy: Strategy::HuffmanOnly,
3522        };
3523
3524        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3525
3526        assert_eq!(err, ReturnCode::Ok);
3527
3528        assert_eq!(output.len(), EXPECTED.len());
3529
3530        assert_eq!(EXPECTED, output);
3531    }
3532
    // Level 1 ("quick") compression of "Hello World!\n" must reproduce this
    // exact zlib-wrapped byte stream.
    #[test]
    fn hello_world_quick() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3560
    // Level 1 compression of an input with repeated '$' characters must
    // reproduce this exact zlib-wrapped byte stream.
    #[test]
    fn hello_world_quick_random() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0x53, 0xe1, 0x50, 0x51, 0xe1, 0x52, 0x51, 0x51, 0x01, 0x00, 0x03, 0xec,
            0x00, 0xeb,
        ];

        let input = "$\u{8}$$\n$$$";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3588
3589    fn fuzz_based_test(input: &[u8], config: DeflateConfig, expected: &[u8]) {
3590        let mut output_rs = [0; 1 << 17];
3591        let (output_rs, err) = compress_slice(&mut output_rs, input, config);
3592        assert_eq!(err, ReturnCode::Ok);
3593
3594        assert_eq!(output_rs, expected);
3595    }
3596
3597    #[test]
3598    fn simple_rle() {
3599        fuzz_based_test(
3600            "\0\0\0\0\u{6}".as_bytes(),
3601            DeflateConfig {
3602                level: -1,
3603                method: Method::Deflated,
3604                window_bits: 11,
3605                mem_level: 4,
3606                strategy: Strategy::Rle,
3607            },
3608            &[56, 17, 99, 0, 2, 54, 0, 0, 11, 0, 7],
3609        )
3610    }
3611
    #[test]
    // Fuzzer-derived input compressed with a tiny window (window_bits 9) and minimal
    // memory (mem_level 1), which exercises window filling near the window boundary.
    // NOTE(review): the name suggests this is a regression test for an out-of-bounds
    // access in `fill_window` — confirm against the original fuzz report.
    fn fill_window_out_of_bounds() {
        const INPUT: &[u8] = &[
            0x71, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x1d, 0x63,
            0x63, 0x63, 0x63, 0x63, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d,
            0x1d, 0x27, 0x0, 0x0, 0x0, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x48, 0x50,
            0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x0, 0x0, 0x0, 0x0,
            0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71,
            0x6a, 0x0, 0x0, 0x0, 0x0, 0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x31, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x48, 0x50, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x20, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x71, 0x75, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x30, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x6,
        ];

        fuzz_based_test(
            INPUT,
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 9,
                mem_level: 1,
                strategy: Strategy::HuffmanOnly,
            },
            // expected compressed output (zlib wrapper, Huffman-only strategy)
            &[
                0x18, 0x19, 0x4, 0xc1, 0x21, 0x1, 0xc4, 0x0, 0x10, 0x3, 0xb0, 0x18, 0x29, 0x1e,
                0x7e, 0x17, 0x83, 0xf5, 0x70, 0x6c, 0xac, 0xfe, 0xc9, 0x27, 0xdb, 0xb6, 0x6f, 0xdb,
                0xb6, 0x6d, 0xdb, 0x80, 0x24, 0xb9, 0xbb, 0xbb, 0x24, 0x49, 0x92, 0x24, 0xf, 0x2,
                0xd8, 0x36, 0x0, 0xf0, 0x3, 0x0, 0x0, 0x24, 0xd0, 0xb6, 0x6d, 0xdb, 0xb6, 0x6d,
                0xdb, 0xbe, 0x6d, 0xf9, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x80, 0x30, 0x0, 0xc3, 0x22,
                0x68, 0xf, 0x36, 0x90, 0xc2, 0xb5, 0xfa, 0x7f, 0x48, 0x80, 0x81, 0xb, 0x40, 0x55,
                0x55, 0x55, 0xd5, 0x16, 0x80, 0xaa, 0x7, 0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                0xe, 0x7c, 0x82, 0xe0, 0x98, 0x0, 0x0, 0x0, 0x4, 0x60, 0x10, 0xf9, 0x8c, 0xe2,
                0xe5, 0xfa, 0x3f, 0x2, 0x54, 0x55, 0x55, 0x65, 0x0, 0xa8, 0xaa, 0xaa, 0xaa, 0xba,
                0x2, 0x50, 0xb5, 0x90, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x78, 0x82, 0xe0, 0xd0,
                0x8a, 0x41, 0x0, 0x0, 0xa2, 0x58, 0x54, 0xb7, 0x60, 0x83, 0x9a, 0x6a, 0x4, 0x96,
                0x87, 0xba, 0x51, 0xf8, 0xfb, 0x9b, 0x26, 0xfc, 0x0, 0x1c, 0x7, 0x6c, 0xdb, 0xb6,
                0x6d, 0xdb, 0xb6, 0x6d, 0xf7, 0xa8, 0x3a, 0xaf, 0xaa, 0x6a, 0x3, 0xf8, 0xc2, 0x3,
                0x40, 0x55, 0x55, 0x55, 0xd5, 0x5b, 0xf8, 0x80, 0xaa, 0x7a, 0xb, 0x0, 0x7f, 0x82,
                0xe0, 0x98, 0x0, 0x40, 0x18, 0x0, 0x82, 0xd8, 0x49, 0x40, 0x2, 0x22, 0x7e, 0xeb,
                0x80, 0xa6, 0xc, 0xa0, 0x9f, 0xa4, 0x2a, 0x38, 0xf, 0x0, 0x0, 0xe7, 0x1, 0xdc,
                0x55, 0x95, 0x17, 0x0, 0x0, 0xae, 0x0, 0x38, 0xc0, 0x67, 0xdb, 0x36, 0x80, 0x2b,
                0x0, 0xe, 0xf0, 0xd9, 0xf6, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x0, 0x30, 0xc, 0x83, 0x22,
                0x69, 0x7, 0xc6, 0xea, 0xff, 0x19, 0x0, 0x0, 0x80, 0xaa, 0x0, 0x0, 0x0, 0x0, 0x0,
                0x0, 0x8e, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6a,
                0xf5, 0x63, 0x60, 0x60, 0x3, 0x0, 0xee, 0x8a, 0x88, 0x67,
            ],
        )
    }
3701
3702    #[test]
3703    fn gzip_no_header() {
3704        let config = DeflateConfig {
3705            level: 9,
3706            method: Method::Deflated,
3707            window_bits: 31, // gzip
3708            ..Default::default()
3709        };
3710
3711        let input = b"Hello World!";
3712        let os = gz_header::OS_CODE;
3713
3714        fuzz_based_test(
3715            input,
3716            config,
3717            &[
3718                31, 139, 8, 0, 0, 0, 0, 0, 2, os, 243, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
3719                81, 4, 0, 163, 28, 41, 28, 12, 0, 0, 0,
3720            ],
3721        )
3722    }
3723
    #[test]
    #[rustfmt::skip]
    // Level 0 emits stored (uncompressed) blocks; window_bits 26 (16 + 10) selects the
    // gzip wrapper. Pins the checksum bytes in the gzip trailer for stored-block output.
    fn gzip_stored_block_checksum() {
        fuzz_based_test(
            &[
                27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0,
            ],
            DeflateConfig {
                level: 0,
                method: Method::Deflated,
                window_bits: 26,
                mem_level: 6,
                strategy: Strategy::Default,
            },
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 4, gz_header::OS_CODE, 1, 18, 0, 237, 255, 27, 27, 27, 27, 27, 27, 27,
                27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0, 60, 101, 156, 55, 18, 0, 0, 0,
            ],
        )
    }
3744
    #[test]
    // Exercises the path where writing the gzip header fields (extra/name/comment) must
    // flush the pending buffer, but the output buffer is too small to complete the flush.
    fn gzip_header_pending_flush() {
        // each header field is 20 bytes of payload plus a NUL terminator
        let extra = "aaaaaaaaaaaaaaaaaaaa\0";
        let name = "bbbbbbbbbbbbbbbbbbbb\0";
        let comment = "cccccccccccccccccccc\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        let config = DeflateConfig {
            window_bits: 31, // 16 + 15: gzip wrapper
            mem_level: 1,    // minimal memory -> small pending buffer (512 bytes, asserted below)
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = 100; // deliberately less than the pending buffer capacity

        assert_eq!(stream.state.bit_writer.pending.capacity(), 512);

        // only 12 bytes remain, so to write the name the pending buffer must be flushed.
        // but there is insufficient output space to flush (only 100 bytes)
        stream.state.bit_writer.pending.extend(&[0; 500]);

        // deflate makes partial progress and returns Ok (not an error, not StreamEnd)
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::Ok);

        // now try that again but with sufficient output space
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        // the 500 pre-filled pending bytes plus 99 bytes of header/data/trailer
        assert_eq!(output_rs.len(), 500 + 99);
    }
3810
    #[test]
    fn gzip_with_header() {
        // this test is here mostly so we get some MIRI action on the gzip header. A test that
        // compares behavior with zlib-ng is in the libz-rs-sys test suite

        // NUL-terminated header fields, as required by the C-style gz_header interface
        let extra = "some extra stuff\0";
        let name = "nomen est omen\0";
        let comment = "such comment\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        let config = DeflateConfig {
            window_bits: 31, // 16 + 15: gzip wrapper
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 256];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        assert_eq!(output_rs.len(), 81);

        // round-trip: inflate the stream back and recover the header fields
        {
            let mut stream = z_stream::default();

            let config = InflateConfig {
                window_bits: config.window_bits,
            };

            assert_eq!(crate::inflate::init(&mut stream, config), ReturnCode::Ok);

            let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!();
            };

            stream.next_in = output_rs.as_mut_ptr() as _;
            stream.avail_in = output_rs.len() as _;

            let mut output = [0u8; 12];
            stream.next_out = output.as_mut_ptr();
            stream.avail_out = output.len() as _;

            // buffers that receive the header fields during inflation
            let mut extra_buf = [0u8; 64];
            let mut name_buf = [0u8; 64];
            let mut comment_buf = [0u8; 64];

            let mut header = gz_header {
                text: 0,
                time: 0,
                xflags: 0,
                os: 0,
                extra: extra_buf.as_mut_ptr(),
                extra_len: 0,
                extra_max: extra_buf.len() as _,
                name: name_buf.as_mut_ptr(),
                name_max: name_buf.len() as _,
                comment: comment_buf.as_mut_ptr(),
                comm_max: comment_buf.len() as _,
                hcrc: 0,
                done: 0,
            };

            assert_eq!(
                unsafe { crate::inflate::get_header(stream, Some(&mut header)) },
                ReturnCode::Ok
            );

            assert_eq!(
                unsafe { crate::inflate::inflate(stream, InflateFlush::Finish) },
                ReturnCode::StreamEnd
            );

            crate::inflate::end(stream);

            // each recovered field must match what was written (minus the NUL terminator)
            assert!(!header.comment.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.comment.cast()) }
                    .to_str()
                    .unwrap(),
                comment.trim_end_matches('\0')
            );

            assert!(!header.name.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.name.cast()) }
                    .to_str()
                    .unwrap(),
                name.trim_end_matches('\0')
            );

            assert!(!header.extra.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.extra.cast()) }
                    .to_str()
                    .unwrap(),
                extra.trim_end_matches('\0')
            );
        }
    }
3945
3946    #[test]
3947    fn insufficient_compress_space() {
3948        const DATA: &[u8] = include_bytes!("deflate/test-data/inflate_buf_error.dat");
3949
3950        fn helper(deflate_buf: &mut [u8]) -> ReturnCode {
3951            let config = DeflateConfig {
3952                level: 0,
3953                method: Method::Deflated,
3954                window_bits: 10,
3955                mem_level: 6,
3956                strategy: Strategy::Default,
3957            };
3958
3959            let (output, err) = compress_slice(deflate_buf, DATA, config);
3960            assert_eq!(err, ReturnCode::Ok);
3961
3962            let config = InflateConfig {
3963                window_bits: config.window_bits,
3964            };
3965
3966            let mut uncompr = [0; 1 << 17];
3967            let (uncompr, err) = uncompress_slice(&mut uncompr, output, config);
3968
3969            if err == ReturnCode::Ok {
3970                assert_eq!(DATA, uncompr);
3971            }
3972
3973            err
3974        }
3975
3976        let mut output = [0; 1 << 17];
3977
3978        // this is too little space
3979        assert_eq!(helper(&mut output[..1 << 16]), ReturnCode::DataError);
3980
3981        // this is sufficient space
3982        assert_eq!(helper(&mut output), ReturnCode::Ok);
3983    }
3984
    /// Compresses b"Hello World!\n" as a gzip stream with the given `flush` mode and
    /// compares the produced bytes against `expected`.
    ///
    /// The flush modes tested through this helper do not finish the stream, so the
    /// slice helper's internal `deflateEnd` reports `DataError` (asserted below).
    fn test_flush(flush: DeflateFlush, expected: &[u8]) {
        let input = b"Hello World!\n";

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS, // 16 + 15 selects the gzip wrapper
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let mut output_rs = vec![0; 128];

        // with the flush modes that we test here, the deflate process still has `Status::Busy`,
        // and the `deflateEnd` function will return `DataError`.
        let expected_err = ReturnCode::DataError;

        let (rs, err) = compress_slice_with_flush(&mut output_rs, input, config, flush);
        assert_eq!(expected_err, err);

        assert_eq!(rs, expected);
    }
4007
    #[test]
    #[rustfmt::skip]
    // SyncFlush: output ends on a byte boundary with the empty stored block
    // sync marker (0x00 0x00 0xFF 0xFF), visible as the last four bytes below.
    fn sync_flush() {
        test_flush(
            DeflateFlush::SyncFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 0, 0, 0, 255, 255,
            ],
        )
    }
4019
    #[test]
    #[rustfmt::skip]
    // PartialFlush: flushes pending output without the full 4-byte sync marker,
    // so the expected stream is shorter than the SyncFlush variant above.
    fn partial_flush() {
        test_flush(
            DeflateFlush::PartialFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 8,
            ],
        );
    }
4031
    #[test]
    #[rustfmt::skip]
    // FullFlush: like SyncFlush (same expected bytes, ending in the 00 00 FF FF
    // marker); additionally resets the compression state for following data.
    fn full_flush() {
        test_flush(
            DeflateFlush::FullFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 0, 0, 0, 255, 255,
            ],
        );
    }
4043
    #[test]
    #[rustfmt::skip]
    // Block: stops at the deflate block boundary; no sync marker or byte-alignment
    // padding is emitted, so this is the shortest of the flush-mode outputs.
    fn block_flush() {
        test_flush(
            DeflateFlush::Block,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2,
            ],
        );
    }
4055
    #[test]
    // splits the input into two, deflates them separately and then joins the deflated byte streams
    // into something that can be correctly inflated again. This is the basic idea behind pigz, and
    // allows for parallel compression.
    fn split_deflate() {
        let input = "Hello World!\n";

        let (input1, input2) = input.split_at(6);

        let mut output1 = vec![0; 128];
        let mut output2 = vec![0; 128];

        let config = DeflateConfig {
            level: 6, // use gzip
            method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        // see also the docs on `SyncFlush`. it makes sure everything is flushed, ends on a byte
        // boundary, and that the final block does not have the "last block" bit set.
        let (prefix, err) = compress_slice_with_flush(
            &mut output1,
            input1.as_bytes(),
            config,
            DeflateFlush::SyncFlush,
        );
        // SyncFlush does not finish the stream, so the helper's deflateEnd reports DataError
        assert_eq!(err, ReturnCode::DataError);

        let (output2, err) = compress_slice_with_flush(
            &mut output2,
            input2.as_bytes(),
            config,
            DeflateFlush::Finish,
        );
        assert_eq!(err, ReturnCode::Ok);

        let inflate_config = crate::inflate::InflateConfig {
            window_bits: 16 + 15,
        };

        // cuts off the length and crc
        let (suffix, end) = output2.split_at(output2.len() - 8);
        let (crc2, len2) = end.split_at(4);
        let crc2 = u32::from_le_bytes(crc2.try_into().unwrap());

        // cuts off the gzip header (10 bytes) from the front
        let suffix = &suffix[10..];

        let mut result: Vec<u8> = Vec::new();
        result.extend(prefix.iter());
        result.extend(suffix);

        // it would be more proper to use `stream.total_in` here, but the slice helpers hide the
        // stream so we're cheating a bit here
        let len1 = input1.len() as u32;
        let len2 = u32::from_le_bytes(len2.try_into().unwrap());
        assert_eq!(len2 as usize, input2.len());

        let crc1 = crate::crc32(0, input1.as_bytes());
        let crc = crate::crc32_combine(crc1, crc2, len2 as u64);

        // combined crc of the parts should be the crc of the whole
        let crc_cheating = crate::crc32(0, input.as_bytes());
        assert_eq!(crc, crc_cheating);

        // write the trailer
        result.extend(crc.to_le_bytes());
        result.extend((len1 + len2).to_le_bytes());

        let mut output = vec![0; 128];
        let (output, err) = crate::inflate::uncompress_slice(&mut output, &result, inflate_config);
        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output, input.as_bytes());
    }
4133
    #[test]
    // Inflates a fixed gzip stream (magic bytes 31,139) with window_bits 25 (16 + 9,
    // i.e. gzip wrapper with a 512-byte window) and pins the decompressed bytes.
    // NOTE(review): the name suggests this exercises window-copy logic in inflate;
    // confirm which code path the original regression targeted.
    fn inflate_window_copy_slice() {
        let uncompressed = [
            9, 126, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12,
            10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 2, 0, 0, 0, 63, 1, 0, 12, 2,
            36, 0, 28, 0, 0, 0, 1, 0, 0, 63, 63, 13, 0, 0, 0, 0, 0, 0, 0, 63, 63, 63, 63, 0, 0, 0,
            0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 9, 0, 0, 0, 9, 0, 0, 12, 33, 2, 0, 0, 8,
            0, 4, 0, 0, 0, 12, 10, 41, 12, 10, 47,
        ];

        let compressed = &[
            31, 139, 8, 0, 0, 0, 0, 0, 4, 3, 181, 193, 49, 14, 194, 32, 24, 128, 209, 175, 192, 0,
            228, 151, 232, 206, 66, 226, 226, 96, 60, 2, 113, 96, 235, 13, 188, 139, 103, 23, 106,
            104, 108, 100, 49, 169, 239, 185, 39, 11, 199, 7, 51, 39, 171, 248, 118, 226, 63, 52,
            157, 120, 86, 102, 78, 86, 209, 104, 58, 241, 84, 129, 166, 12, 4, 154, 178, 229, 202,
            30, 36, 130, 166, 19, 79, 21, 104, 202, 64, 160, 41, 91, 174, 236, 65, 34, 10, 200, 19,
            162, 206, 68, 96, 130, 156, 15, 188, 229, 138, 197, 157, 161, 35, 3, 87, 126, 245, 0,
            28, 224, 64, 146, 2, 139, 1, 196, 95, 196, 223, 94, 10, 96, 92, 33, 86, 2, 0, 0,
        ];

        let config = InflateConfig { window_bits: 25 };

        let mut dest_vec_rs = vec![0u8; uncompressed.len()];
        let (output_rs, error) =
            crate::inflate::uncompress_slice(&mut dest_vec_rs, compressed, config);

        assert_eq!(ReturnCode::Ok, error);
        assert_eq!(output_rs, uncompressed);
    }
4182
    #[test]
    // Fuzzer-derived input compressed at level 6 with window_bits 9; pins the exact
    // compressed output.
    // NOTE(review): the name suggests this input produces different match candidates
    // depending on the hash-calculation variant (standard vs. rolling) — confirm
    // which variant the expected bytes were generated with.
    fn hash_calc_difference() {
        let input = [
            0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0,
            0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 112, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 0,
            0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0,
            50, 0,
        ];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 9,
            mem_level: 8,
            strategy: Strategy::Default,
        };

        let expected = [
            24, 149, 99, 96, 96, 96, 96, 208, 6, 17, 112, 138, 129, 193, 128, 1, 29, 24, 50, 208,
            1, 200, 146, 169, 79, 24, 74, 59, 96, 147, 52, 71, 22, 70, 246, 88, 26, 94, 80, 128,
            83, 6, 162, 219, 144, 76, 183, 210, 5, 8, 67, 105, 36, 159, 35, 128, 57, 118, 97, 100,
            160, 197, 192, 192, 96, 196, 0, 0, 3, 228, 25, 128,
        ];

        fuzz_based_test(&input, config, &expected);
    }
4226
    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
    mod _cache_lines {
        use super::State;
        // FIXME: once zlib-rs Minimum Supported Rust Version >= 1.77, switch to core::mem::offset_of
        // and move this _cache_lines module from up a level from tests to super::
        use memoffset::offset_of;

        // Compile-time checks that `State`'s cache-line marker fields sit at 64-byte
        // boundaries, so hot fields stay grouped within individual cache lines.
        const _: () = assert!(offset_of!(State, status) == 0);
        const _: () = assert!(offset_of!(State, _cache_line_0) == 64);
        const _: () = assert!(offset_of!(State, _cache_line_1) == 128);
        const _: () = assert!(offset_of!(State, _cache_line_2) == 192);
        const _: () = assert!(offset_of!(State, _cache_line_3) == 256);
    }
4240}