use crate::{pointer_alignment_padding, type_hash::TypeHash, Finalize};
use smallvec::SmallVec;
use std::{
    alloc::Layout,
    collections::{hash_map::Entry, HashMap},
    ops::Range,
};

#[derive(Debug, Copy, Clone)]
struct DataStackFinalizer {
    callback: unsafe fn(*mut ()),
    layout: Layout,
}

#[derive(Debug, Copy, Clone)]
struct DataStackRegisterTag {
    type_hash: TypeHash,
    layout: Layout,
    finalizer: Option<unsafe fn(*mut ())>,
    padding: u8,
}

pub struct DataStackToken(usize);

impl DataStackToken {
    /// # Safety
    /// `position` must be a valid entry boundary of the stack this token
    /// will be passed to.
    pub unsafe fn new(position: usize) -> Self {
        Self(position)
    }
}

pub struct DataStackRegisterAccess<'a> {
    stack: &'a mut DataStack,
    position: usize,
}

impl<'a> DataStackRegisterAccess<'a> {
    pub fn type_hash(&self) -> TypeHash {
        unsafe {
            self.stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
                .type_hash
        }
    }

    pub fn layout(&self) -> Layout {
        unsafe {
            self.stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
                .layout
        }
    }

    pub fn type_hash_layout(&self) -> (TypeHash, Layout) {
        unsafe {
            let tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            (tag.type_hash, tag.layout)
        }
    }

    pub fn has_value(&self) -> bool {
        unsafe {
            self.stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
                .finalizer
                .is_some()
        }
    }

    pub fn read<T: 'static>(&'a self) -> Option<&'a T> {
        unsafe {
            let tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
                self.stack
                    .memory
                    .as_ptr()
                    .add(self.position - tag.layout.size())
                    .cast::<T>()
                    .as_ref()
            } else {
                None
            }
        }
    }

    pub fn write<T: 'static>(&'a mut self) -> Option<&'a mut T> {
        unsafe {
            let tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position - tag.layout.size())
                    .cast::<T>()
                    .as_mut()
            } else {
                None
            }
        }
    }

    pub fn take<T: 'static>(&mut self) -> Option<T> {
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() && tag.finalizer.is_some() {
                tag.finalizer = None;
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
                Some(
                    self.stack
                        .memory
                        .as_ptr()
                        .add(self.position - tag.layout.size())
                        .cast::<T>()
                        .read_unaligned(),
                )
            } else {
                None
            }
        }
    }

    pub fn free(&mut self) -> bool {
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if let Some(finalizer) = tag.finalizer {
                (finalizer)(
                    self.stack
                        .memory
                        .as_mut_ptr()
                        .add(self.position - tag.layout.size())
                        .cast::<()>(),
                );
                tag.finalizer = None;
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
                true
            } else {
                false
            }
        }
    }

    pub fn set<T: Finalize + 'static>(&mut self, value: T) {
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == TypeHash::of::<T>() {
                if let Some(finalizer) = tag.finalizer {
                    (finalizer)(
                        self.stack
                            .memory
                            .as_mut_ptr()
                            .add(self.position - tag.layout.size())
                            .cast::<()>(),
                    );
                } else {
                    tag.finalizer = Some(T::finalize_raw);
                }
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position - tag.layout.size())
                    .cast::<T>()
                    .write_unaligned(value);
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
            }
        }
    }

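    /// Moves this register's value into `other`, assuming both registers
    /// hold the same type and layout; `other`'s previous value (if any) is
    /// finalized first and this register is left empty.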
    pub fn move_to(&mut self, other: &mut Self) {
        if self.position == other.position {
            return;
        }
        unsafe {
            let mut tag = self
                .stack
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            let mut other_tag = other
                .stack
                .memory
                .as_ptr()
                .add(other.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            if tag.type_hash == other_tag.type_hash && tag.layout == other_tag.layout {
                if let Some(finalizer) = other_tag.finalizer {
                    (finalizer)(
                        other
                            .stack
                            .memory
                            .as_mut_ptr()
                            .add(other.position - other_tag.layout.size())
                            .cast::<()>(),
                    );
                }
                other_tag.finalizer = tag.finalizer;
                tag.finalizer = None;
                let source = self
                    .stack
                    .memory
                    .as_ptr()
                    .add(self.position - tag.layout.size());
                let target = other
                    .stack
                    .memory
                    .as_mut_ptr()
                    .add(other.position - other_tag.layout.size());
                target.copy_from(source, tag.layout.size());
                self.stack
                    .memory
                    .as_mut_ptr()
                    .add(self.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(tag);
                other
                    .stack
                    .memory
                    .as_mut_ptr()
                    .add(other.position)
                    .cast::<DataStackRegisterTag>()
                    .write_unaligned(other_tag);
            }
        }
    }
}

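/// Controls which kinds of entries a stack accepts: plain values, registers,
/// or both.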
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
pub enum DataStackMode {
    Values,
    Registers,
    #[default]
    Mixed,
}

impl DataStackMode {
    pub fn allows_values(self) -> bool {
        matches!(self, Self::Values | Self::Mixed)
    }

    pub fn allows_registers(self) -> bool {
        matches!(self, Self::Registers | Self::Mixed)
    }
}

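/// Stack of raw bytes with trailing type metadata.
///
/// As implemented by `push` and `push_register_raw` below, a plain value is
/// stored as `[value bytes][TypeHash]` and a register as
/// `[alignment padding][value bytes][DataStackRegisterTag][TypeHash]`, so the
/// stack can always be walked back from `position` by reading the trailing
/// `TypeHash` of each entry first.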
pub struct DataStack {
    memory: Vec<u8>,
    position: usize,
    mode: DataStackMode,
    finalizers: HashMap<TypeHash, DataStackFinalizer>,
    registers: Vec<usize>,
    drop: bool,
}

impl Drop for DataStack {
    fn drop(&mut self) {
        if self.drop {
            self.restore(DataStackToken(0));
        }
    }
}

impl DataStack {
    pub fn new(mut capacity: usize, mode: DataStackMode) -> Self {
        capacity = capacity.next_power_of_two();
        Self {
            memory: vec![0; capacity],
            position: 0,
            mode,
            finalizers: Default::default(),
            registers: vec![],
            drop: true,
        }
    }

    pub fn position(&self) -> usize {
        self.position
    }

    pub fn size(&self) -> usize {
        self.memory.len()
    }

    pub fn available(&self) -> usize {
        self.size().saturating_sub(self.position)
    }

    pub fn as_bytes(&self) -> &[u8] {
        &self.memory[0..self.position]
    }

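    /// Walks entries from the top of the stack down, reading each entry's
    /// trailing `TypeHash`, and calls `f` with its type, layout, bytes, byte
    /// range, and whether it currently holds a live (not yet finalized) value.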
    pub fn visit(&self, mut f: impl FnMut(TypeHash, Layout, &[u8], Range<usize>, bool)) {
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let mut position = self.position;
        while position > 0 {
            if position < type_layout.size() {
                return;
            }
            position -= type_layout.size();
            let type_hash = unsafe {
                self.memory
                    .as_ptr()
                    .add(position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if type_hash == TypeHash::of::<DataStackRegisterTag>() {
                if position < tag_layout.size() {
                    return;
                }
                position -= tag_layout.size();
                let tag = unsafe {
                    self.memory
                        .as_ptr()
                        .add(position)
                        .cast::<DataStackRegisterTag>()
                        .read_unaligned()
                };
                if position < tag.layout.size() {
                    return;
                }
                position -= tag.layout.size();
                let range = position..(position + tag.layout.size());
                f(
                    tag.type_hash,
                    tag.layout,
                    &self.memory[range.clone()],
                    range,
                    tag.finalizer.is_some(),
                );
                position -= tag.padding as usize;
            } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
                if position < finalizer.layout.size() {
                    return;
                }
                position -= finalizer.layout.size();
                let range = position..(position + finalizer.layout.size());
                f(
                    type_hash,
                    finalizer.layout,
                    &self.memory[range.clone()],
                    range,
                    true,
                );
            }
        }
    }

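    /// Pushes `value`'s raw bytes followed by its `TypeHash`, registering the
    /// type's finalizer so the value can be dropped later.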
    pub fn push<T: Finalize + Sized + 'static>(&mut self, value: T) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let value_layout = Layout::new::<T>().pad_to_align();
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position + value_layout.size() + type_layout.size() > self.size() {
            return false;
        }
        let type_hash = TypeHash::of::<T>();
        self.finalizers
            .entry(type_hash)
            .or_insert(DataStackFinalizer {
                callback: T::finalize_raw,
                layout: value_layout,
            });
        unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<T>()
                .write_unaligned(value);
            self.position += value_layout.size();
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .write_unaligned(type_hash);
            self.position += type_layout.size();
        }
        true
    }

    /// # Safety
    /// `layout`, `type_hash` and `finalizer` must describe the value whose
    /// raw bytes are passed in `data`.
    pub unsafe fn push_raw(
        &mut self,
        layout: Layout,
        type_hash: TypeHash,
        finalizer: unsafe fn(*mut ()),
        data: &[u8],
    ) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let value_layout = layout.pad_to_align();
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if data.len() != value_layout.size()
            || self.position + value_layout.size() + type_layout.size() > self.size()
        {
            return false;
        }
        self.finalizers
            .entry(type_hash)
            .or_insert(DataStackFinalizer {
                callback: finalizer,
                layout: value_layout,
            });
        self.memory[self.position..(self.position + value_layout.size())].copy_from_slice(data);
        self.position += value_layout.size();
        self.memory
            .as_mut_ptr()
            .add(self.position)
            .cast::<TypeHash>()
            .write_unaligned(type_hash);
        self.position += type_layout.size();
        true
    }

    pub fn push_register<T: Finalize + 'static>(&mut self) -> Option<usize> {
        unsafe { self.push_register_raw(TypeHash::of::<T>(), Layout::new::<T>().pad_to_align()) }
    }

    pub fn push_register_value<T: Finalize + 'static>(&mut self, value: T) -> Option<usize> {
        let result = self.push_register::<T>()?;
        let mut access = self.access_register(result)?;
        access.set(value);
        Some(result)
    }

    /// # Safety
    /// `value_layout` must be the padded layout of the type identified by
    /// `type_hash`.
    pub unsafe fn push_register_raw(
        &mut self,
        type_hash: TypeHash,
        value_layout: Layout,
    ) -> Option<usize> {
        if !self.mode.allows_registers() {
            return None;
        }
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let padding = self.alignment_padding(value_layout.align());
        if self.position + padding + value_layout.size() + tag_layout.size() + type_layout.size()
            > self.size()
        {
            return None;
        }
        unsafe {
            self.position += padding + value_layout.size();
            let position = self.position;
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .write_unaligned(DataStackRegisterTag {
                    type_hash,
                    layout: value_layout,
                    finalizer: None,
                    padding: padding as u8,
                });
            self.position += tag_layout.size();
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .write_unaligned(TypeHash::of::<DataStackRegisterTag>());
            self.position += type_layout.size();
            self.registers.push(position);
            Some(self.registers.len() - 1)
        }
    }

    pub fn push_stack(&mut self, mut other: Self) -> Result<(), Self> {
        if self.available() < other.position {
            return Err(other);
        }
        self.memory[self.position..(self.position + other.position)]
            .copy_from_slice(&other.memory[0..other.position]);
        self.position += other.position;
        self.finalizers
            .extend(other.finalizers.iter().map(|(key, value)| {
                (
                    *key,
                    DataStackFinalizer {
                        callback: value.callback,
                        layout: value.layout,
                    },
                )
            }));
        unsafe { other.prevent_drop() };
        Ok(())
    }

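    /// Moves the value out of `register` and pushes it onto this stack as a
    /// plain value, leaving the register empty.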
    pub fn push_from_register(&mut self, register: &mut DataStackRegisterAccess) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let mut tag = unsafe {
            register
                .stack
                .memory
                .as_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
        };
        if self.position + tag.layout.size() + type_layout.size() > self.size() {
            return false;
        }
        if let Entry::Vacant(e) = self.finalizers.entry(tag.type_hash) {
            if let Some(finalizer) = tag.finalizer {
                e.insert(DataStackFinalizer {
                    callback: finalizer,
                    layout: tag.layout,
                });
            }
        }
        tag.finalizer = None;
        unsafe {
            let source = register
                .stack
                .memory
                .as_ptr()
                .add(register.position - tag.layout.size());
            let target = self.memory.as_mut_ptr().add(self.position);
            target.copy_from(source, tag.layout.size());
            self.position += tag.layout.size();
            self.memory
                .as_mut_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .write_unaligned(tag.type_hash);
            self.position += type_layout.size();
            register
                .stack
                .memory
                .as_mut_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .write_unaligned(tag);
        }
        true
    }

    pub fn pop<T: Sized + 'static>(&mut self) -> Option<T> {
        if !self.mode.allows_values() {
            return None;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let value_layout = Layout::new::<T>().pad_to_align();
        if self.position < type_layout.size() + value_layout.size() {
            return None;
        }
        let type_hash = unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        };
        if type_hash != TypeHash::of::<T>() || type_hash == TypeHash::of::<DataStackRegisterTag>() {
            return None;
        }
        self.position -= type_layout.size();
        let result = unsafe {
            self.memory
                .as_ptr()
                .add(self.position - value_layout.size())
                .cast::<T>()
                .read_unaligned()
        };
        self.position -= value_layout.size();
        Some(result)
    }

    /// # Safety
    /// The returned bytes represent a still-live value; the caller takes over
    /// responsibility for finalizing it.
    #[allow(clippy::type_complexity)]
    pub unsafe fn pop_raw(&mut self) -> Option<(Layout, TypeHash, unsafe fn(*mut ()), Vec<u8>)> {
        if !self.mode.allows_values() {
            return None;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position < type_layout.size() {
            return None;
        }
        let type_hash = unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        };
        if type_hash == TypeHash::of::<DataStackRegisterTag>() {
            return None;
        }
        let finalizer = self.finalizers.get(&type_hash)?;
        if self.position < type_layout.size() + finalizer.layout.size() {
            return None;
        }
        self.position -= type_layout.size();
        let data = self.memory[(self.position - finalizer.layout.size())..self.position].to_vec();
        self.position -= finalizer.layout.size();
        Some((finalizer.layout, type_hash, finalizer.callback, data))
    }

    pub fn drop(&mut self) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position < type_layout.size() {
            return false;
        }
        self.position -= type_layout.size();
        let type_hash = unsafe {
            self.memory
                .as_ptr()
                .add(self.position)
                .cast::<TypeHash>()
                .read_unaligned()
        };
        if type_hash == TypeHash::of::<DataStackRegisterTag>() {
            self.position += type_layout.size();
            return false;
        }
        if let Some(finalizer) = self.finalizers.get(&type_hash) {
            self.position -= finalizer.layout.size();
            unsafe {
                (finalizer.callback)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
            }
        }
        true
    }

    pub fn drop_register(&mut self) -> bool {
        if !self.mode.allows_registers() {
            return false;
        }
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position < type_layout.size() + tag_layout.size() {
            return false;
        }
        unsafe {
            let type_hash = self
                .memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned();
            if type_hash != TypeHash::of::<DataStackRegisterTag>() {
                return false;
            }
            self.position -= type_layout.size();
            self.position -= tag_layout.size();
            let tag = self
                .memory
                .as_ptr()
                .add(self.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned();
            self.position -= tag.layout.size();
            if let Some(finalizer) = tag.finalizer {
                (finalizer)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
            }
            self.position -= tag.padding as usize;
            self.registers.pop();
        }
        true
    }

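    /// Splits off the top `data_count` values into a new stack, copying their
    /// bytes together with the finalizers needed to drop them later.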
    pub fn pop_stack(&mut self, mut data_count: usize, capacity: Option<usize>) -> Self {
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let mut size = 0;
        let mut position = self.position;
        let mut finalizers = HashMap::new();
        while data_count > 0 && position > 0 {
            data_count -= 1;
            position -= type_layout.size();
            size += type_layout.size();
            let type_hash = unsafe {
                self.memory
                    .as_mut_ptr()
                    .add(position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if let Some(finalizer) = self.finalizers.get(&type_hash) {
                position -= finalizer.layout.size();
                size += finalizer.layout.size();
                finalizers.insert(
                    type_hash,
                    DataStackFinalizer {
                        callback: finalizer.callback,
                        layout: finalizer.layout,
                    },
                );
            }
        }
        let mut result = Self::new(capacity.unwrap_or(size).max(size), self.mode);
        result.memory[0..size].copy_from_slice(&self.memory[position..self.position]);
        result.finalizers.extend(finalizers);
        self.position = position;
        result.position = size;
        result
    }

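    /// Pops the top value off this stack into `register`, finalizing the
    /// register's previous value first; fails unless the types match.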
    pub fn pop_to_register(&mut self, register: &mut DataStackRegisterAccess) -> bool {
        if !self.mode.allows_values() {
            return false;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        if self.position < type_layout.size() {
            return false;
        }
        let type_hash = unsafe {
            self.memory
                .as_mut_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        };
        let mut tag = unsafe {
            register
                .stack
                .memory
                .as_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .read_unaligned()
        };
        if type_hash != tag.type_hash || type_hash == TypeHash::of::<DataStackRegisterTag>() {
            return false;
        }
        if self.position < type_layout.size() + tag.layout.size() {
            return false;
        }
        let finalizer = match self.finalizers.get(&type_hash) {
            Some(finalizer) => finalizer.callback,
            None => return false,
        };
        unsafe {
            if let Some(finalizer) = tag.finalizer {
                (finalizer)(
                    register
                        .stack
                        .memory
                        .as_mut_ptr()
                        .add(register.position - tag.layout.size())
                        .cast::<()>(),
                );
            }
            tag.finalizer = Some(finalizer);
            let source = self
                .memory
                .as_ptr()
                .add(self.position - type_layout.size() - tag.layout.size());
            let target = register
                .stack
                .memory
                .as_mut_ptr()
                .add(register.position - tag.layout.size());
            target.copy_from(source, tag.layout.size());
            register
                .stack
                .memory
                .as_mut_ptr()
                .add(register.position)
                .cast::<DataStackRegisterTag>()
                .write_unaligned(tag);
        }
        self.position -= type_layout.size();
        self.position -= tag.layout.size();
        true
    }

    pub fn store(&self) -> DataStackToken {
        DataStackToken(self.position)
    }

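    /// Unwinds the stack down to `token`, finalizing every live value and
    /// register passed along the way.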
    pub fn restore(&mut self, token: DataStackToken) {
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let tag_type_hash = TypeHash::of::<DataStackRegisterTag>();
        while self.position > token.0 {
            self.position -= type_layout.size();
            let type_hash = unsafe {
                self.memory
                    .as_ptr()
                    .add(self.position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if type_hash == tag_type_hash {
                unsafe {
                    let tag = self
                        .memory
                        .as_ptr()
                        .add(self.position - tag_layout.size())
                        .cast::<DataStackRegisterTag>()
                        .read_unaligned();
                    self.position -= tag_layout.size();
                    self.position -= tag.layout.size();
                    if let Some(finalizer) = tag.finalizer {
                        (finalizer)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
                    }
                    self.position -= tag.padding as usize;
                    self.registers.pop();
                }
            } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
                self.position -= finalizer.layout.size();
                unsafe {
                    (finalizer.callback)(self.memory.as_mut_ptr().add(self.position).cast::<()>());
                }
            }
        }
    }

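    /// Reverses the order of stack entries above `token`, keeping each
    /// entry's internal byte layout intact.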
    pub fn reverse(&mut self, token: DataStackToken) {
        let size = self.position.saturating_sub(token.0);
        let mut meta_data = SmallVec::<[_; 8]>::with_capacity(8);
        let mut meta_registers = 0;
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        let tag_layout = Layout::new::<DataStackRegisterTag>().pad_to_align();
        let tag_type_hash = TypeHash::of::<DataStackRegisterTag>();
        let mut position = self.position;
        while position > token.0 {
            position -= type_layout.size();
            let type_hash = unsafe {
                self.memory
                    .as_mut_ptr()
                    .add(position)
                    .cast::<TypeHash>()
                    .read_unaligned()
            };
            if type_hash == tag_type_hash {
                unsafe {
                    let tag = self
                        .memory
                        .as_ptr()
                        .add(position - tag_layout.size())
                        .cast::<DataStackRegisterTag>()
                        .read_unaligned();
                    position -= tag_layout.size();
                    position -= tag.layout.size();
                    meta_data.push((
                        position - token.0,
                        type_layout.size() + tag_layout.size() + tag.layout.size(),
                    ));
                    meta_registers += 1;
                }
            } else if let Some(finalizer) = self.finalizers.get(&type_hash) {
                position -= finalizer.layout.size();
                meta_data.push((
                    position - token.0,
                    type_layout.size() + finalizer.layout.size(),
                ));
            }
        }
        if meta_data.len() <= 1 {
            return;
        }
        let mut memory = SmallVec::<[_; 256]>::new();
        memory.resize(size, 0);
        memory.copy_from_slice(&self.memory[token.0..self.position]);
        for (source_position, size) in meta_data {
            self.memory[position..(position + size)]
                .copy_from_slice(&memory[source_position..(source_position + size)]);
            position += size;
        }
        let start = self.registers.len() - meta_registers;
        self.registers[start..].reverse();
    }

    pub fn peek(&self) -> Option<TypeHash> {
        if self.position == 0 {
            return None;
        }
        let type_layout = Layout::new::<TypeHash>().pad_to_align();
        Some(unsafe {
            self.memory
                .as_ptr()
                .add(self.position - type_layout.size())
                .cast::<TypeHash>()
                .read_unaligned()
        })
    }

    pub fn registers_count(&self) -> usize {
        self.registers.len()
    }

    pub fn access_register(&mut self, index: usize) -> Option<DataStackRegisterAccess> {
        let position = *self.registers.get(index)?;
        Some(DataStackRegisterAccess {
            stack: self,
            position,
        })
    }

    pub fn access_registers_pair(
        &mut self,
        a: usize,
        b: usize,
    ) -> Option<(DataStackRegisterAccess, DataStackRegisterAccess)> {
        if a == b {
            return None;
        }
        let position_a = *self.registers.get(a)?;
        let position_b = *self.registers.get(b)?;
        unsafe {
            Some((
                DataStackRegisterAccess {
                    stack: (self as *mut Self).as_mut()?,
                    position: position_a,
                },
                DataStackRegisterAccess {
                    stack: (self as *mut Self).as_mut()?,
                    position: position_b,
                },
            ))
        }
    }

    /// # Safety
    /// Disables finalization on drop; the caller becomes responsible for the
    /// values still stored in this stack.
    pub unsafe fn prevent_drop(&mut self) {
        self.drop = false;
    }

    #[inline]
    /// # Safety
    unsafe fn alignment_padding(&self, alignment: usize) -> usize {
        pointer_alignment_padding(self.memory.as_ptr().add(self.position), alignment)
    }
}

pub trait DataStackPack: Sized {
    fn stack_push(self, stack: &mut DataStack);

    fn stack_push_reversed(self, stack: &mut DataStack) {
        let token = stack.store();
        self.stack_push(stack);
        stack.reverse(token);
    }

    fn stack_pop(stack: &mut DataStack) -> Self;

    fn pack_types() -> Vec<TypeHash>;
}

impl DataStackPack for () {
    fn stack_push(self, _: &mut DataStack) {}

    fn stack_pop(_: &mut DataStack) -> Self {}

    fn pack_types() -> Vec<TypeHash> {
        vec![]
    }
}

macro_rules! impl_data_stack_tuple {
    ($($type:ident),+) => {
        impl<$($type: 'static),+> DataStackPack for ($($type,)+) {
            #[allow(non_snake_case)]
            fn stack_push(self, stack: &mut DataStack) {
                let ($( $type, )+) = self;
                $( stack.push($type); )+
            }

            #[allow(non_snake_case)]
            fn stack_pop(stack: &mut DataStack) -> Self {
                ($(
                    stack.pop::<$type>().unwrap_or_else(
                        || panic!("Could not pop data of type: {}", std::any::type_name::<$type>())
                    ),
                )+)
            }

            #[allow(non_snake_case)]
            fn pack_types() -> Vec<TypeHash> {
                vec![ $( TypeHash::of::<$type>() ),+ ]
            }
        }
    };
}

impl_data_stack_tuple!(A);
impl_data_stack_tuple!(A, B);
impl_data_stack_tuple!(A, B, C);
impl_data_stack_tuple!(A, B, C, D);
impl_data_stack_tuple!(A, B, C, D, E);
impl_data_stack_tuple!(A, B, C, D, E, F);
impl_data_stack_tuple!(A, B, C, D, E, F, G);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
impl_data_stack_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);

#[cfg(test)]
mod tests {
    use crate::{
        data_stack::{DataStack, DataStackMode},
        type_hash::TypeHash,
    };
    use std::{alloc::Layout, cell::RefCell, rc::Rc};

    #[test]
    fn test_data_stack() {
        struct Droppable(Rc<RefCell<bool>>);

        impl Drop for Droppable {
            fn drop(&mut self) {
                *self.0.borrow_mut() = true;
            }
        }

        let dropped = Rc::new(RefCell::new(false));
        let mut stack = DataStack::new(10240, DataStackMode::Values);
        assert_eq!(stack.size(), 16384);
        assert_eq!(stack.position(), 0);
        stack.push(Droppable(dropped.clone()));
        assert_eq!(stack.position(), 16);
        let token = stack.store();
        stack.push(42_usize);
        assert_eq!(stack.position(), 32);
        stack.push(true);
        assert_eq!(stack.position(), 41);
        stack.push(4.2_f32);
        assert_eq!(stack.position(), 53);
        assert!(!*dropped.borrow());
        assert!(stack.pop::<()>().is_none());
        stack.push(());
        assert_eq!(stack.position(), 61);
        stack.reverse(token);
        let mut stack2 = stack.pop_stack(2, None);
        assert_eq!(stack.position(), 36);
        assert_eq!(stack2.size(), 32);
        assert_eq!(stack2.position(), 25);
        assert_eq!(stack2.pop::<usize>().unwrap(), 42_usize);
        assert_eq!(stack2.position(), 9);
        assert!(stack2.pop::<bool>().unwrap());
        assert_eq!(stack2.position(), 0);
        stack2.push(true);
        stack2.push(42_usize);
        stack.push_stack(stack2).ok().unwrap();
        assert_eq!(stack.position(), 61);
        assert_eq!(stack.pop::<usize>().unwrap(), 42_usize);
        assert_eq!(stack.position(), 45);
        assert!(stack.pop::<bool>().unwrap());
        assert_eq!(stack.position(), 36);
        assert_eq!(stack.pop::<f32>().unwrap(), 4.2_f32);
        assert_eq!(stack.position(), 24);
        stack.pop::<()>().unwrap();
        assert_eq!(stack.position(), 16);
        stack.push(42_usize);
        unsafe {
            let (layout, type_hash, finalizer, data) = stack.pop_raw().unwrap();
            assert_eq!(layout, Layout::new::<usize>().pad_to_align());
            assert_eq!(type_hash, TypeHash::of::<usize>());
            assert!(stack.push_raw(layout, type_hash, finalizer, &data));
            assert_eq!(stack.position(), 32);
            assert_eq!(stack.pop::<usize>().unwrap(), 42_usize);
            assert_eq!(stack.position(), 16);
        }
        drop(stack);
        assert!(*dropped.borrow());

        let mut stack = DataStack::new(10240, DataStackMode::Registers);
        assert_eq!(stack.size(), 16384);
        stack.push_register::<bool>().unwrap();
        stack.drop_register();
        let a = stack.push_register_value(true).unwrap();
        assert!(*stack.access_register(a).unwrap().read::<bool>().unwrap());
        assert!(stack.access_register(a).unwrap().take::<bool>().unwrap());
        assert!(!stack.access_register(a).unwrap().has_value());
        let b = stack.push_register_value(0usize).unwrap();
        stack.access_register(b).unwrap().set(42usize);
        assert_eq!(
            *stack.access_register(b).unwrap().read::<usize>().unwrap(),
            42
        );
    }
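
    // A minimal extra check for register-to-register moves via
    // `access_registers_pair` and `move_to`; the `usize` payload and the
    // capacity used here are arbitrary.
    #[test]
    fn test_register_move() {
        let mut stack = DataStack::new(1024, DataStackMode::Registers);
        let a = stack.push_register_value(42_usize).unwrap();
        let b = stack.push_register::<usize>().unwrap();
        let (mut a, mut b) = stack.access_registers_pair(a, b).unwrap();
        a.move_to(&mut b);
        assert!(!a.has_value());
        assert_eq!(b.take::<usize>().unwrap(), 42);
    }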
}