use std::ops::Deref;

use gix_features::zlib;
use gix_hash::oid;

use super::find::Error;
use crate::{
    find::Header,
    store::{find::error::DeltaBaseRecursion, handle, load_index},
};

impl<S> super::Handle<S>
where
    S: Deref<Target = super::Store> + Clone,
{
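    /// Obtain the header of the object associated with `id`, trying pack indices first, then loose
    /// object databases, and refreshing the snapshot of indices as needed.
    /// `recursion` tracks the depth of delta-base lookups to protect against unbounded recursion.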
    pub(crate) fn try_header_inner<'b>(
        &'b self,
        mut id: &'b gix_hash::oid,
        inflate: &mut zlib::Inflate,
        snapshot: &mut load_index::Snapshot,
        recursion: Option<DeltaBaseRecursion<'_>>,
    ) -> Result<Option<Header>, Error> {
        if let Some(r) = recursion {
            if r.depth >= self.max_recursion_depth {
                return Err(Error::DeltaBaseRecursionLimit {
                    max_depth: self.max_recursion_depth,
                    id: r.original_id.to_owned(),
                });
            }
        } else if !self.ignore_replacements {
            // Apply object replacements only on the top-level call, never while resolving delta bases.
            if let Ok(pos) = self
                .store
                .replacements
                .binary_search_by(|(map_this, _)| map_this.as_ref().cmp(id))
            {
                id = self.store.replacements[pos].1.as_ref();
            }
        }
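        // Keep trying until the object turns up in a pack or a loose database, or until no further
        // indices can be loaded.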
        'outer: loop {
            {
                let marker = snapshot.marker;
                for (idx, index) in snapshot.indices.iter_mut().enumerate() {
                    if let Some(handle::index_lookup::Outcome {
                        object_index: handle::IndexForObjectInPack { pack_id, pack_offset },
                        index_file,
                        pack: possibly_pack,
                    }) = index.lookup(id)
                    {
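                        // Make sure the pack data is available, loading it on demand if necessary.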
                        let pack = match possibly_pack {
                            Some(pack) => pack,
                            None => match self.store.load_pack(pack_id, marker)? {
                                Some(pack) => {
                                    *possibly_pack = Some(pack);
                                    possibly_pack.as_deref().expect("just put it in")
                                }
                                None => {
                                    // The pack isn't available anymore, so we are supposed to try another round with a fresh index.
                                    match self.store.load_one_index(self.refresh, snapshot.marker)? {
                                        Some(new_snapshot) => {
                                            *snapshot = new_snapshot;
                                            self.clear_cache();
                                            continue 'outer;
                                        }
                                        None => {
                                            // Nothing new in the index. It's somewhat unexpected to have no pack but also
                                            // no new index yet; we set the new index before removing any slots, so the
                                            // new index should be observable.
                                            return Ok(None);
                                        }
                                    }
                                }
                            },
                        };
                        let entry = pack.entry(pack_offset)?;
                        let res = match pack.decode_header(entry, inflate, &|id| {
                            index_file.pack_offset_by_id(id).and_then(|pack_offset| {
                                pack.entry(pack_offset)
                                    .ok()
                                    .map(gix_pack::data::decode::header::ResolvedBase::InPack)
                            })
                        }) {
                            Ok(header) => Ok(header.into()),
                            Err(gix_pack::data::decode::Error::DeltaBaseUnresolved(base_id)) => {
                                // Only a multi-pack index is allowed to refer to other packs within the same multi-pack.
                                // Anything else would constitute a thin pack, which is only allowed in transit.
                                // However, if we somehow end up with one, we resolve it safely, even though we could
                                // refuse to handle this case and error out instead.
                                let hdr = self
                                    .try_header_inner(
                                        &base_id,
                                        inflate,
                                        snapshot,
                                        recursion
                                            .map(DeltaBaseRecursion::inc_depth)
                                            .or_else(|| DeltaBaseRecursion::new(id).into()),
                                    )
                                    .map_err(|err| Error::DeltaBaseLookup {
                                        err: Box::new(err),
                                        base_id,
                                        id: id.to_owned(),
                                    })?
                                    .ok_or_else(|| Error::DeltaBaseMissing {
                                        base_id,
                                        id: id.to_owned(),
                                    })?;
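                                // The recursive lookup above may have replaced the snapshot, so resolve `id` again
                                // before decoding its header.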
                                let handle::index_lookup::Outcome {
                                    object_index:
                                        handle::IndexForObjectInPack {
                                            pack_id: _,
                                            pack_offset,
                                        },
                                    index_file,
                                    pack: possibly_pack,
                                } = match snapshot.indices[idx].lookup(id) {
                                    Some(res) => res,
                                    None => {
                                        let mut out = None;
                                        for index in &mut snapshot.indices {
                                            out = index.lookup(id);
                                            if out.is_some() {
                                                break;
                                            }
                                        }
                                        out.unwrap_or_else(|| {
                                            panic!("could not find object {id} in any index after looking up one of its base objects {base_id}")
                                        })
                                    }
                                };
                                let pack = possibly_pack
                                    .as_ref()
                                    .expect("pack to still be available like just now");
                                let entry = pack.entry(pack_offset)?;
                                pack.decode_header(entry, inflate, &|id| {
                                    index_file
                                        .pack_offset_by_id(id)
                                        .and_then(|pack_offset| {
                                            pack.entry(pack_offset)
                                                .ok()
                                                .map(gix_pack::data::decode::header::ResolvedBase::InPack)
                                        })
                                        .or_else(|| {
                                            (id == base_id).then(|| {
                                                gix_pack::data::decode::header::ResolvedBase::OutOfPack {
                                                    kind: hdr.kind(),
                                                    num_deltas: hdr.num_deltas(),
                                                }
                                            })
                                        })
                                })
                                .map(Into::into)
                            }
                            Err(err) => Err(err),
                        }?;
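                        // Move the index that answered this lookup to the front so it is tried first next time.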
                        if idx != 0 {
                            snapshot.indices.swap(0, idx);
                        }
                        return Ok(Some(res));
                    }
                }
            }
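
            // The object isn't in any pack we know of, so consult the loose object databases.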
            for lodb in snapshot.loose_dbs.iter() {
                // TODO: remove this double-lookup once the borrow checker allows it.
                if lodb.contains(id) {
                    return lodb.try_header(id).map(|opt| opt.map(Into::into)).map_err(Into::into);
                }
            }
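
            // Still not found: load another index if one is available and retry, otherwise give up.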
            match self.store.load_one_index(self.refresh, snapshot.marker)? {
                Some(new_snapshot) => {
                    *snapshot = new_snapshot;
                    self.clear_cache();
                }
                None => return Ok(None),
            }
        }
    }
}

impl<S> crate::Header for super::Handle<S>
where
    S: Deref<Target = super::Store> + Clone,
{
    fn try_header(&self, id: &oid) -> Result<Option<Header>, gix_object::find::Error> {
        let mut snapshot = self.snapshot.borrow_mut();
        let mut inflate = self.inflate.borrow_mut();
        self.try_header_inner(id, &mut inflate, &mut snapshot, None)
            .map_err(|err| Box::new(err) as _)
    }
}