use std::hash::{BuildHasher as _, Hash};
use std::ops::Index;

use hashbrown::hash_table::{Entry, HashTable};
use hashbrown::DefaultHashBuilder as RandomState;
6
/// A single token of an interned input, represented as a `u32` index into
/// the [`Interner`] that produced it.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
#[repr(transparent)]
pub struct Token(pub u32);

impl From<u32> for Token {
    fn from(token: u32) -> Self {
        Token(token)
    }
}

impl From<Token> for u32 {
    fn from(token: Token) -> Self {
        token.0
    }
}
31
pub trait TokenSource {
    type Token: Hash + Eq;
    type Tokenizer: Iterator<Item = Self::Token>;
    fn tokenize(&self) -> Self::Tokenizer;
    fn estimate_tokens(&self) -> u32;
}
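
// Illustrative sketch, not part of this module's API: one way to implement
// `TokenSource` for line-based input. The `LineSource` type and its
// estimation heuristic are assumptions made for this example only; the only
// contract is that `estimate_tokens` cheaply approximates the number of
// items `tokenize` will yield.
#[cfg(test)]
mod token_source_example {
    use super::TokenSource;

    struct LineSource<'a>(&'a str);

    impl<'a> TokenSource for LineSource<'a> {
        type Token = &'a str;
        type Tokenizer = std::str::Lines<'a>;

        fn tokenize(&self) -> Self::Tokenizer {
            self.0.lines()
        }

        fn estimate_tokens(&self) -> u32 {
            // Crude estimate: assume lines average roughly 16 bytes.
            (self.0.len() / 16 + 1) as u32
        }
    }

    #[test]
    fn tokenizes_lines() {
        let src = LineSource("a\nb\na\n");
        assert_eq!(src.tokenize().collect::<Vec<_>>(), ["a", "b", "a"]);
        assert!(src.estimate_tokens() >= 1);
    }
}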
38
/// A pair of inputs (`before` and `after`) whose tokens are interned by a
/// shared [`Interner`], so equal values on either side compare by id.
#[derive(Default)]
pub struct InternedInput<T> {
    pub before: Vec<Token>,
    pub after: Vec<Token>,
    pub interner: Interner<T>,
}

impl<T> InternedInput<T> {
    pub fn clear(&mut self) {
        self.before.clear();
        self.after.clear();
        self.interner.clear();
    }
}
62
impl<T: Eq + Hash> InternedInput<T> {
    pub fn new<I: TokenSource<Token = T>>(before: I, after: I) -> Self {
        let token_estimate_before = before.estimate_tokens() as usize;
        let token_estimate_after = after.estimate_tokens() as usize;
        let mut res = Self {
            before: Vec::with_capacity(token_estimate_before),
            after: Vec::with_capacity(token_estimate_after),
            interner: Interner::new(token_estimate_before + token_estimate_after),
        };
        res.update_before(before.tokenize());
        res.update_after(after.tokenize());
        res
    }

    /// Replaces the `before` side with the interned tokens of `input`.
    ///
    /// Tokens referenced only by the old `before` side remain in the
    /// interner.
    pub fn update_before(&mut self, input: impl Iterator<Item = T>) {
        self.before.clear();
        self.before
            .extend(input.map(|token| self.interner.intern(token)));
    }

    /// Replaces the `after` side with the interned tokens of `input`.
    ///
    /// Tokens referenced only by the old `after` side remain in the
    /// interner.
    pub fn update_after(&mut self, input: impl Iterator<Item = T>) {
        self.after.clear();
        self.after
            .extend(input.map(|token| self.interner.intern(token)));
    }
}
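
// Hedged usage sketch: identical values on either side intern to the same
// `Token`, so `before` and `after` can be compared by id alone. The module
// name and sample strings are illustrative only.
#[cfg(test)]
mod interned_input_example {
    use super::InternedInput;

    #[test]
    fn shared_tokens_get_equal_ids() {
        let mut input: InternedInput<&str> = InternedInput::default();
        input.update_before(["fn main() {", "}"].iter().copied());
        input.update_after(["fn main() {", "    todo!()", "}"].iter().copied());
        // "fn main() {" and "}" appear on both sides, so their ids match.
        assert_eq!(input.before[0], input.after[0]);
        assert_eq!(input.before[1], input.after[2]);
        // Three distinct values were interned in total.
        assert_eq!(input.interner.num_tokens(), 3);
    }
}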
98
/// Interns tokens, assigning each distinct value a unique [`Token`] id.
#[derive(Default)]
pub struct Interner<T> {
    tokens: Vec<T>,
    table: HashTable<Token>,
    hasher: RandomState,
}
106
impl<T> Interner<T> {
    /// Creates an interner with enough capacity for the estimated tokens of
    /// both sources.
    pub fn new_for_token_source<S: TokenSource<Token = T>>(before: &S, after: &S) -> Self {
        Self::new(before.estimate_tokens() as usize + after.estimate_tokens() as usize)
    }

    /// Creates an interner with space reserved for `capacity` tokens.
    pub fn new(capacity: usize) -> Interner<T> {
        Interner {
            tokens: Vec::with_capacity(capacity),
            table: HashTable::with_capacity(capacity),
            hasher: RandomState::default(),
        }
    }

    /// Removes all interned tokens.
    pub fn clear(&mut self) {
        self.table.clear();
        self.tokens.clear();
    }

    /// Returns the number of distinct tokens interned so far.
    pub fn num_tokens(&self) -> u32 {
        self.tokens.len() as u32
    }
}
134
impl<T: Hash + Eq> Interner<T> {
    /// Interns `token`, returning the existing [`Token`] if this value was
    /// interned before and assigning the next free id otherwise.
    pub fn intern(&mut self, token: T) -> Token {
        let hash = self.hasher.hash_one(&token);
        match self.table.entry(
            hash,
            |&it| self.tokens[it.0 as usize] == token,
            |&token| self.hasher.hash_one(&self.tokens[token.0 as usize]),
        ) {
            Entry::Occupied(entry) => *entry.get(),
            Entry::Vacant(entry) => {
                let interned = Token(self.tokens.len() as u32);
                entry.insert(interned);
                self.tokens.push(token);
                interned
            }
        }
    }

    /// Erases `first_erased_token` and every token interned after it, so
    /// that their ids can be reassigned by later `intern` calls.
    pub fn erase_tokens_after(&mut self, first_erased_token: Token) {
        assert!(first_erased_token.0 <= self.tokens.len() as u32);
        let retained = first_erased_token.0 as usize;
        let erased = self.tokens.len() - retained;
        if retained <= erased {
            // Most tokens are erased: rebuilding the table from the retained
            // tokens is cheaper than removing the erased entries one by one.
            self.table.clear();
            for (i, token) in self.tokens[0..retained].iter().enumerate() {
                let hash = self.hasher.hash_one(token);
                self.table.insert_unique(hash, Token(i as u32), |&token| {
                    self.hasher.hash_one(&self.tokens[token.0 as usize])
                });
            }
        } else {
            // Only a few tokens are erased: remove just their table entries.
            for (i, token) in self.tokens[retained..].iter().enumerate() {
                let hash = self.hasher.hash_one(token);
                match self
                    .table
                    .find_entry(hash, |token| token.0 == (retained + i) as u32)
                {
                    Ok(occupied) => drop(occupied.remove()),
                    // Every erased token was interned, so it must be present.
                    Err(_absent) => unreachable!(),
                }
            }
        }
        self.tokens.truncate(first_erased_token.0 as usize);
    }
}
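
// Hedged sketch of `erase_tokens_after`: erasing from a given id truncates
// the token list, and the freed ids are handed out again by later `intern`
// calls. The module name and sample values are illustrative only.
#[cfg(test)]
mod erase_example {
    use super::{Interner, Token};

    #[test]
    fn erased_ids_are_reused() {
        let mut interner = Interner::new(4);
        let a = interner.intern("a");
        let b = interner.intern("b");
        // Interning an already-seen value returns the existing id.
        assert_eq!(interner.intern("a"), a);
        assert_eq!(interner.num_tokens(), 2);

        // Erase "b" and everything interned after it.
        interner.erase_tokens_after(b);
        assert_eq!(interner.num_tokens(), 1);

        // The freed id is reassigned to the next new token.
        assert_eq!(interner.intern("c"), Token(1));
    }
}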
182
impl<T> Index<Token> for Interner<T> {
    type Output = T;
    fn index(&self, index: Token) -> &Self::Output {
        &self.tokens[index.0 as usize]
    }
}
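
// Hedged sketch: `Index<Token>` resolves an interned id back to the value it
// was assigned for, without consuming the interner. Module and test names
// are illustrative only.
#[cfg(test)]
mod index_example {
    use super::Interner;

    #[test]
    fn token_resolves_to_original_value() {
        let mut interner = Interner::new(1);
        let token = interner.intern(String::from("needle"));
        assert_eq!(interner[token], "needle");
    }
}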