1use lock_api::RawRwLock;
2use std::{cell::UnsafeCell, collections::hash_map::HashMap, fmt, hash, ops};
3
/// A map guarded by a raw reader-writer lock, allowing values to be
/// lazily created and then borrowed while the lock is held.
///
/// Interior mutability: the map lives in an `UnsafeCell`, and the impl
/// blocks below acquire `lock` before every dereference of the cell.
pub struct StorageMap<L, M> {
    /// Raw rwlock (a `lock_api::RawRwLock` in practice) arbitrating all
    /// access to `map`.
    lock: L,
    /// The wrapped map; dereferenced only while `lock` is held.
    map: UnsafeCell<M>,
}
10
11unsafe impl<L: Send, M> Send for StorageMap<L, M> {}
12unsafe impl<L: Sync, M> Sync for StorageMap<L, M> {}
13
14impl<L: RawRwLock, M: Default> Default for StorageMap<L, M> {
15 fn default() -> Self {
16 StorageMap {
17 lock: L::INIT,
18 map: UnsafeCell::new(M::default()),
19 }
20 }
21}
22
23impl<L, M: fmt::Debug> fmt::Debug for StorageMap<L, M> {
24 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
25 self.map.get().fmt(formatter)
26 }
27}
28
/// Guard borrowing a single value inside a `StorageMap`.
///
/// Holds either the shared or the exclusive side of the map's lock
/// (recorded in `exclusive`) and releases that side on drop.
pub struct StorageMapGuard<'a, L: 'a + RawRwLock, V: 'a> {
    /// The map's lock; released when the guard drops.
    lock: &'a L,
    /// Borrowed value; valid for as long as the lock is held.
    value: &'a V,
    /// True when the exclusive lock is held, false for the shared lock.
    exclusive: bool,
}
35
36impl<'a, L: RawRwLock, V> ops::Deref for StorageMapGuard<'a, L, V> {
37 type Target = V;
38 fn deref(&self) -> &V {
39 self.value
40 }
41}
42
43impl<'a, L: RawRwLock, V> Drop for StorageMapGuard<'a, L, V> {
44 fn drop(&mut self) {
45 unsafe {
46 if self.exclusive {
47 self.lock.unlock_exclusive();
48 } else {
49 self.lock.unlock_shared();
50 }
51 }
52 }
53}
54
/// Guard giving mutable access to the entire underlying map.
///
/// Created by `StorageMap::whole_write`; holds the exclusive lock and
/// releases it on drop.
pub struct WholeMapWriteGuard<'a, L: 'a + RawRwLock, M: 'a> {
    /// The map's lock; unlocked exclusively when the guard drops.
    lock: &'a L,
    /// Mutable borrow of the whole map, valid while the lock is held.
    map: &'a mut M,
}
59
60impl<'a, L: RawRwLock, M> ops::Deref for WholeMapWriteGuard<'a, L, M> {
61 type Target = M;
62 fn deref(&self) -> &M {
63 self.map
64 }
65}
66
67impl<'a, L: RawRwLock, M> ops::DerefMut for WholeMapWriteGuard<'a, L, M> {
68 fn deref_mut(&mut self) -> &mut M {
69 self.map
70 }
71}
72
73impl<'a, L: RawRwLock, V> Drop for WholeMapWriteGuard<'a, L, V> {
74 fn drop(&mut self) {
75 unsafe {
76 self.lock.unlock_exclusive();
77 }
78 }
79}
80
/// Outcome of `StorageMap::prepare_maybe`.
///
/// Derives added: a public result enum should be inspectable (`Debug`)
/// and comparable (`PartialEq`/`Eq`) so callers can match or assert on
/// it; it is a plain fieldless enum, so `Clone`/`Copy` are free.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PrepareResult {
    /// The key was already present; nothing was inserted.
    AlreadyExists,
    /// The creation closure declined to produce a value (returned `None`).
    UnableToCreate,
    /// A new value was created and inserted.
    Created,
}
90
91impl<L, K, V, S> StorageMap<L, HashMap<K, V, S>>
92where
93 L: RawRwLock,
94 K: Clone + Eq + hash::Hash,
95 S: hash::BuildHasher,
96{
97 pub fn with_hasher(hash_builder: S) -> Self {
99 StorageMap {
100 lock: L::INIT,
101 map: UnsafeCell::new(HashMap::with_hasher(hash_builder)),
102 }
103 }
104
105 pub fn get_or_create_with<'a, F: FnOnce() -> V>(
109 &'a self,
110 key: &K,
111 create_fn: F,
112 ) -> StorageMapGuard<'a, L, V> {
113 self.lock.lock_shared();
114 let map = unsafe { &*self.map.get() };
116 if let Some(value) = map.get(key) {
117 return StorageMapGuard {
118 lock: &self.lock,
119 value,
120 exclusive: false,
121 };
122 }
123 unsafe {
124 self.lock.unlock_shared();
125 }
126 let value = create_fn();
128 self.lock.lock_exclusive();
129 let map = unsafe { &mut *self.map.get() };
130 StorageMapGuard {
131 lock: &self.lock,
132 value: &*map.entry(key.clone()).or_insert(value),
133 exclusive: true,
134 }
135 }
136
137 pub fn prepare_maybe<F: FnOnce() -> Option<V>>(&self, key: &K, create_fn: F) -> PrepareResult {
140 self.lock.lock_shared();
141 let map = unsafe { &*self.map.get() };
143 let has = map.contains_key(key);
144 unsafe {
145 self.lock.unlock_shared();
146 }
147 if has {
148 return PrepareResult::AlreadyExists;
149 }
150 let value = match create_fn() {
152 Some(value) => value,
153 None => return PrepareResult::UnableToCreate,
154 };
155 self.lock.lock_exclusive();
157 let map = unsafe { &mut *self.map.get() };
158 map.insert(key.clone(), value);
159 unsafe {
160 self.lock.unlock_exclusive();
161 }
162 PrepareResult::Created
163 }
164
165 pub fn whole_write(&self) -> WholeMapWriteGuard<L, HashMap<K, V, S>> {
167 self.lock.lock_exclusive();
168 WholeMapWriteGuard {
169 lock: &self.lock,
170 map: unsafe { &mut *self.map.get() },
171 }
172 }
173}