use crate::store_impl::packed;

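// Present the buffer contents with the optional header skipped, so lookups see only reference lines.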
impl AsRef<[u8]> for packed::Buffer {
    fn as_ref(&self) -> &[u8] {
        &self.data.as_ref()[self.offset..]
    }
}

impl AsRef<[u8]> for packed::Backing {
    fn as_ref(&self) -> &[u8] {
        match self {
            packed::Backing::InMemory(data) => data,
            packed::Backing::Mapped(map) => map,
        }
    }
}

/// Facilities for opening a [`packed::Buffer`], either from a file on disk or from bytes in memory.
pub mod open {
    use std::path::PathBuf;

    use memmap2::Mmap;
    use winnow::{prelude::*, stream::Offset};

    use crate::store_impl::packed;

    /// Initialization
    impl packed::Buffer {
        fn open_with_backing(backing: packed::Backing, path: PathBuf) -> Result<Self, Error> {
            let (backing, offset) = {
                let (offset, sorted) = {
                    let mut input = backing.as_ref();
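                    // A leading '#' marks the optional header line; an empty buffer counts as header-less.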
                    if *input.first().unwrap_or(&b' ') == b'#' {
                        let header = packed::decode::header::<()>
                            .parse_next(&mut input)
                            .map_err(|_| Error::HeaderParsing)?;
                        let offset = input.offset_from(&backing.as_ref());
                        (offset, header.sorted)
                    } else {
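                        // Without a header we cannot assume the entries are sorted.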
                        (0, false)
                    }
                };

                if !sorted {
                    // This implementation is likely slower than what git does, but it's also less code.
                    let mut entries = packed::Iter::new(&backing.as_ref()[offset..])?.collect::<Result<Vec<_>, _>>()?;
                    entries.sort_by_key(|e| e.name.as_bstr());
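                    // Re-serialize the now sorted entries without a header, hence the offset of 0 below.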
                    let mut serialized = Vec::<u8>::new();
                    for entry in entries {
                        serialized.extend_from_slice(entry.target);
                        serialized.push(b' ');
                        serialized.extend_from_slice(entry.name.as_bstr());
                        serialized.push(b'\n');
                        if let Some(object) = entry.object {
                            serialized.push(b'^');
                            serialized.extend_from_slice(object);
                            serialized.push(b'\n');
                        }
                    }
                    (packed::Backing::InMemory(serialized), 0)
                } else {
                    (backing, offset)
                }
            };
            Ok(packed::Buffer {
                offset,
                data: backing,
                path,
            })
        }

        /// Open the file at `path`, mapping it into memory if its size exceeds `use_memory_map_if_larger_than_bytes`,
        /// and reading it entirely into memory otherwise.
        ///
        /// In order to allow fast lookups and optimizations, the contents of the packed refs must be sorted.
        /// If they are not, they will be sorted on the fly and the result stored in an in-memory buffer.
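        ///
        /// # Example
        ///
        /// A minimal sketch of the call; the crate path `gix_ref` and the 32 KiB threshold
        /// are illustrative assumptions, not fixed API or a recommended value:
        ///
        /// ```no_run
        /// use std::path::PathBuf;
        ///
        /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
        /// // `gix_ref` stands in for this crate's public name; adjust as needed.
        /// // Files above 32 KiB are memory-mapped, smaller ones are read into memory.
        /// let buffer = gix_ref::packed::Buffer::open(PathBuf::from(".git/packed-refs"), 32 * 1024)?;
        /// # Ok(())
        /// # }
        /// ```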
        pub fn open(path: PathBuf, use_memory_map_if_larger_than_bytes: u64) -> Result<Self, Error> {
            let backing = if std::fs::metadata(&path)?.len() <= use_memory_map_if_larger_than_bytes {
                packed::Backing::InMemory(std::fs::read(&path)?)
            } else {
                packed::Backing::Mapped(
                    // SAFETY: we have to accept the risk of the file being changed underneath us.
                    //         Git itself never writes into an existing `packed-refs` file, but replaces it atomically.
                    #[allow(unsafe_code)]
                    unsafe {
                        Mmap::map(&std::fs::File::open(&path)?)?
                    },
                )
            };
            Self::open_with_backing(backing, path)
        }

        /// Open a buffer from `bytes`, which is the content of a typical `packed-refs` file.
        ///
        /// In order to allow fast lookups and optimizations, the contents of the packed refs must be sorted.
        /// If that's not the case, they will be sorted on the fly.
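        ///
        /// # Example
        ///
        /// A sketch with a made-up object id; the crate path `gix_ref` is an assumption:
        ///
        /// ```no_run
        /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
        /// // One entry preceded by the header line modern git writes; `gix_ref` stands in for this crate's name.
        /// let input = b"# pack-refs with: peeled fully-peeled sorted \n1111111111111111111111111111111111111111 refs/heads/main\n";
        /// let buffer = gix_ref::packed::Buffer::from_bytes(input)?;
        /// # Ok(())
        /// # }
        /// ```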
        pub fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {
            let backing = packed::Backing::InMemory(bytes.into());
            Self::open_with_backing(backing, PathBuf::from("<memory>"))
        }
    }

    mod error {
        use crate::packed;

        /// The error returned by [`open()`][super::packed::Buffer::open()].
        #[derive(Debug, thiserror::Error)]
        #[allow(missing_docs)]
        pub enum Error {
            #[error("The packed-refs file did not have a header or wasn't sorted and could not be iterated")]
            Iter(#[from] packed::iter::Error),
            #[error("The header could not be parsed, even though first line started with '#'")]
            HeaderParsing,
            #[error("The buffer could not be opened or read")]
            Io(#[from] std::io::Error),
        }
    }
    pub use error::Error;
}