use std::io;

/// Returned by [`BytesToEntriesIter::new_from_header()`][crate::data::input::BytesToEntriesIter::new_from_header()] and as part
/// of `Item` of [`BytesToEntriesIter`][crate::data::input::BytesToEntriesIter].
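///
/// The example below is a minimal sketch of matching on this error exhaustively; it assumes the
/// type is publicly available as `gix_pack::data::input::Error`, and the `describe()` helper is
/// purely illustrative.
///
/// ```
/// use gix_pack::data::input;
///
/// // Hypothetical helper mapping each error variant to a short label.
/// fn describe(err: &input::Error) -> &'static str {
///     match err {
///         input::Error::Io(_) => "io",
///         input::Error::PackParse(_) => "pack-parse",
///         input::Error::ChecksumMismatch { .. } => "checksum-mismatch",
///         input::Error::IncompletePack { .. } => "incomplete-pack",
///         input::Error::NotFound { .. } => "not-found",
///     }
/// }
///
/// let err = input::Error::IncompletePack { actual: 32, expected: 64 };
/// assert_eq!(describe(&err), "incomplete-pack");
/// ```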
#[derive(thiserror::Error, Debug)]
#[allow(missing_docs)]
pub enum Error {
#[error("An IO operation failed while streaming an entry")]
Io(#[from] io::Error),
#[error(transparent)]
PackParse(#[from] crate::data::header::decode::Error),
#[error("pack checksum in trailer was {expected}, but actual checksum was {actual}")]
ChecksumMismatch {
expected: gix_hash::ObjectId,
actual: gix_hash::ObjectId,
},
#[error("pack is incomplete: it was decompressed into {actual} bytes but {expected} bytes where expected.")]
IncompletePack { actual: u64, expected: u64 },
#[error("The object {object_id} could not be decoded or wasn't found")]
NotFound { object_id: gix_hash::ObjectId },
}

/// Iteration Mode
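///
/// The sketch below illustrates how the variants differ with respect to hashing. The
/// `computes_own_hash()` helper is hypothetical, and the example assumes this enum is exported
/// as `gix_pack::data::input::Mode`.
///
/// ```
/// use gix_pack::data::input::Mode;
///
/// // Hypothetical helper: `Verify` and `Restore` both compute their own pack hash,
/// // while `AsIs` merely passes along the trailer found in the pack.
/// fn computes_own_hash(mode: Mode) -> bool {
///     matches!(mode, Mode::Verify | Mode::Restore)
/// }
///
/// assert!(computes_own_hash(Mode::Verify));
/// assert!(computes_own_hash(Mode::Restore));
/// assert!(!computes_own_hash(Mode::AsIs));
/// ```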
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Mode {
    /// Provide the trailer as read from the pack
    AsIs,
    /// Generate our own hash and trigger an error on the last iterated object
    /// if it does not match the hash provided with the pack.
    ///
    /// This way, whoever iterates the data cannot miss corruption as long as
    /// the iteration is continued through to the end.
    Verify,
    /// Generate our own hash and, if there was an error or the objects were depleted early
    /// due to a partial pack, return the last valid entry along with the hash computed thus far.
    /// Note that the existing pack hash, if present, will be ignored.
    /// As we won't know which object failed, every object will carry the hash obtained thus far.
    /// This also means that consuming algorithms must be aware of this possibility, or else they
    /// might wrongfully assume the pack is finished.
    Restore,
}

/// Define what to do with the compressed bytes portion of a pack [`Entry`][super::Entry]
#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum EntryDataMode {
    /// Do nothing with the compressed bytes we read
    Ignore,
    /// Only create a CRC32 of the entry, otherwise similar to `Ignore`
    Crc32,
    /// Keep them and pass them along in a newly allocated buffer
    Keep,
    /// As above, but also compute a CRC32
    KeepAndCrc32,
}

impl EntryDataMode {
    /// Returns true if a crc32 should be computed
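    ///
    /// A minimal usage sketch, assuming this enum is exported as `gix_pack::data::input::EntryDataMode`:
    ///
    /// ```
    /// use gix_pack::data::input::EntryDataMode;
    ///
    /// assert!(EntryDataMode::Crc32.crc32());
    /// assert!(EntryDataMode::KeepAndCrc32.crc32());
    /// assert!(!EntryDataMode::Keep.crc32());
    /// assert!(!EntryDataMode::Ignore.crc32());
    /// ```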
    pub fn crc32(&self) -> bool {
        match self {
            EntryDataMode::KeepAndCrc32 | EntryDataMode::Crc32 => true,
            EntryDataMode::Keep | EntryDataMode::Ignore => false,
        }
    }
    /// Returns true if compressed bytes should be kept
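    ///
    /// A minimal usage sketch, under the same `gix_pack::data::input` export-path assumption as [`EntryDataMode::crc32()`]:
    ///
    /// ```
    /// use gix_pack::data::input::EntryDataMode;
    ///
    /// assert!(EntryDataMode::Keep.keep());
    /// assert!(EntryDataMode::KeepAndCrc32.keep());
    /// assert!(!EntryDataMode::Crc32.keep());
    /// assert!(!EntryDataMode::Ignore.keep());
    /// ```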
    pub fn keep(&self) -> bool {
        match self {
            EntryDataMode::Keep | EntryDataMode::KeepAndCrc32 => true,
            EntryDataMode::Ignore | EntryDataMode::Crc32 => false,
        }
    }
}