use std::any::{Any, TypeId};
use std::sync::Arc;

use futures::Future;
use moka::sync::Cache;
use object_store::path::Path;

use crate::utils::path::LancePathExt;
use crate::Result;

pub use deepsize::{Context, DeepSizeOf};

type ArcAny = Arc<dyn Any + Send + Sync>;

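/// A type-erased cache value paired with a closure that reports the deep size
/// of the concrete type it was built from, so heterogeneous records can share
/// one cache and still be weighed in bytes.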
#[derive(Clone)]
struct SizedRecord {
    record: ArcAny,
    size_accessor: Arc<dyn Fn(&ArcAny) -> usize + Send + Sync>,
}

impl std::fmt::Debug for SizedRecord {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SizedRecord")
            .field("record", &self.record)
            .finish()
    }
}

impl SizedRecord {
    fn new<T: DeepSizeOf + Send + Sync + 'static>(record: Arc<T>) -> Self {
        let size_accessor =
            |record: &ArcAny| -> usize { record.downcast_ref::<T>().unwrap().deep_size_of() };
        Self {
            record,
            size_accessor: Arc::new(size_accessor),
        }
    }
}

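/// Cache for file metadata, keyed by file path and the metadata's [`TypeId`],
/// so several kinds of metadata can be cached for the same file.
///
/// An optional base path, when set, is prepended to every key. A cache built
/// with [`FileMetadataCache::no_cache`] stores nothing.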
#[derive(Clone, Debug)]
pub struct FileMetadataCache {
    cache: Option<Arc<Cache<(Path, TypeId), SizedRecord>>>,
    base_path: Option<Path>,
}

impl DeepSizeOf for FileMetadataCache {
    fn deep_size_of_children(&self, _: &mut Context) -> usize {
        self.cache
            .as_ref()
            .map(|cache| {
                cache
                    .iter()
                    .map(|(_, v)| (v.size_accessor)(&v.record))
                    .sum()
            })
            .unwrap_or(0)
    }
}

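/// How the `capacity` argument of [`FileMetadataCache::with_capacity`] is
/// interpreted: as a maximum number of entries, or as a maximum total size in
/// bytes as measured by [`DeepSizeOf`].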
pub enum CapacityMode {
    Items,
    Bytes,
}

impl FileMetadataCache {
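    /// Creates a cache that holds at most `capacity` items.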
    pub fn new(capacity: usize) -> Self {
        Self {
            cache: Some(Arc::new(Cache::new(capacity as u64))),
            base_path: None,
        }
    }

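    /// Creates a cache that stores nothing: inserts are dropped and lookups
    /// always miss.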
    pub fn no_cache() -> Self {
        Self {
            cache: None,
            base_path: None,
        }
    }

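    /// Creates a cache bounded either by a maximum number of items or by a
    /// maximum total size in bytes, as measured by [`DeepSizeOf`].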
    pub fn with_capacity(capacity: usize, mode: CapacityMode) -> Self {
        match mode {
            CapacityMode::Items => Self::new(capacity),
            CapacityMode::Bytes => Self {
                cache: Some(Arc::new(
                    Cache::builder()
                        // Bound the cache by the total weighted size of its entries.
                        .max_capacity(capacity as u64)
                        .weigher(|_, v: &SizedRecord| {
                            (v.size_accessor)(&v.record).try_into().unwrap_or(u32::MAX)
                        })
                        .build(),
                )),
                base_path: None,
            },
        }
    }

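    /// Returns a copy of this cache whose keys are prefixed with `base_path`.
    /// The underlying cache storage is shared with `self`.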
    pub fn with_base_path(&self, base_path: Path) -> Self {
        Self {
            cache: self.cache.clone(),
            base_path: Some(base_path),
        }
    }

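    /// Returns the number of entries currently in the cache, flushing pending
    /// maintenance tasks first so the count is up to date.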
    pub fn size(&self) -> usize {
        if let Some(cache) = self.cache.as_ref() {
            cache.run_pending_tasks();
            cache.entry_count() as usize
        } else {
            0
        }
    }

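    /// Looks up cached metadata of type `T` under a string key. Panics if the
    /// key cannot be parsed as an object store [`Path`].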
    pub fn get_by_str<T: Send + Sync + 'static>(&self, path: &str) -> Option<Arc<T>> {
        self.get(&Path::parse(path).unwrap())
    }

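    /// Looks up cached metadata of type `T` for `path`, applying the base path
    /// prefix if one is configured. Returns `None` on a miss or if caching is
    /// disabled.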
    pub fn get<T: Send + Sync + 'static>(&self, path: &Path) -> Option<Arc<T>> {
        let cache = self.cache.as_ref()?;
        let temp: Path;
        let path = if let Some(base_path) = &self.base_path {
            temp = base_path.child_path(path);
            &temp
        } else {
            path
        };
        cache
            .get(&(path.to_owned(), TypeId::of::<T>()))
            .map(|metadata| metadata.record.clone().downcast::<T>().unwrap())
    }

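    /// Caches `metadata` under `(path, TypeId::of::<T>())`, applying the base
    /// path prefix if one is configured. Does nothing if caching is disabled.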
    pub fn insert<T: DeepSizeOf + Send + Sync + 'static>(&self, path: Path, metadata: Arc<T>) {
        let Some(cache) = self.cache.as_ref() else {
            return;
        };
        let path = if let Some(base_path) = &self.base_path {
            base_path.child_path(&path)
        } else {
            path
        };
        cache.insert((path, TypeId::of::<T>()), SizedRecord::new(metadata));
    }

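    /// Like [`Self::insert`], but takes a string key. Panics if the key cannot
    /// be parsed as an object store [`Path`].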
    pub fn insert_by_str<T: DeepSizeOf + Send + Sync + 'static>(
        &self,
        key: &str,
        metadata: Arc<T>,
    ) {
        self.insert(Path::parse(key).unwrap(), metadata);
    }

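    /// Returns the cached metadata for `path` if present; otherwise awaits
    /// `loader` to produce it, caches the result, and returns it. Errors from
    /// `loader` are propagated and nothing is cached.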
    pub async fn get_or_insert<T: DeepSizeOf + Send + Sync + 'static, F, Fut>(
        &self,
        path: &Path,
        loader: F,
    ) -> Result<Arc<T>>
    where
        F: Fn(&Path) -> Fut,
        Fut: Future<Output = Result<T>>,
    {
        if let Some(metadata) = self.get::<T>(path) {
            return Ok(metadata);
        }

        let metadata = Arc::new(loader(path).await?);
        self.insert(path.to_owned(), metadata.clone());
        Ok(metadata)
    }
}
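
// A minimal usage sketch, not part of the original module: `usage_sketch`,
// `DummyMetadata`, and the paths below are illustrative names, and the test
// assumes the `futures` dependency exposes `futures::executor::block_on`
// (its default `executor` feature).
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[derive(Debug, PartialEq)]
    struct DummyMetadata(u64);

    impl DeepSizeOf for DummyMetadata {
        fn deep_size_of_children(&self, _: &mut Context) -> usize {
            0
        }
    }

    #[test]
    fn cache_round_trip() {
        let cache = FileMetadataCache::new(16);
        let path = Path::parse("dataset/file.lance").unwrap();

        // Entries are keyed by (path, TypeId): a lookup with a different type
        // under the same path misses.
        cache.insert(path.clone(), Arc::new(DummyMetadata(42)));
        assert_eq!(cache.size(), 1);
        assert_eq!(
            *cache.get::<DummyMetadata>(&path).unwrap(),
            DummyMetadata(42)
        );
        assert!(cache.get::<String>(&path).is_none());

        // get_or_insert runs the loader only on a miss and caches the result.
        let other = Path::parse("dataset/other.lance").unwrap();
        let loaded = futures::executor::block_on(
            cache.get_or_insert(&other, |_: &Path| async { Ok(DummyMetadata(7)) }),
        )
        .unwrap();
        assert_eq!(*loaded, DummyMetadata(7));
        assert_eq!(cache.size(), 2);
    }
}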