1use crate::constant_hash::Table;
10use alloc::vec::Vec;
11use core::fmt::{self, Display, Formatter};
12use core::ops::{Deref, DerefMut};
13use core::str::FromStr;
14
15#[cfg(feature = "enable-serde")]
16use serde_derive::{Deserialize, Serialize};
17
18use crate::bitset::ScalarBitSet;
19use crate::entity;
20use crate::ir::{
21 self,
22 condcodes::{FloatCC, IntCC},
23 trapcode::TrapCode,
24 types, Block, FuncRef, MemFlags, SigRef, StackSlot, Type, Value,
25};
26
/// A compact list of `Value`s, stored out-of-line in a `ValueListPool`.
pub type ValueList = entity::EntityList<Value>;

/// Memory pool that owns the storage behind every `ValueList`.
pub type ValueListPool = entity::ListPool<Value>;
34
/// A block being called with arguments, e.g. the target of a branch.
///
/// Representation invariant: the target block and the arguments share a single
/// `EntityList`. Element 0 is the `Block`, reinterpreted as a `Value` via
/// `BlockCall::block_to_value`; the actual arguments follow from element 1 on.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    // Packed list: [block-as-value, arg0, arg1, ...].
    values: entity::EntityList<Value>,
}
57
58impl BlockCall {
59 fn value_to_block(val: Value) -> Block {
62 Block::from_u32(val.as_u32())
63 }
64
65 fn block_to_value(block: Block) -> Value {
68 Value::from_u32(block.as_u32())
69 }
70
71 pub fn new(block: Block, args: &[Value], pool: &mut ValueListPool) -> Self {
73 let mut values = ValueList::default();
74 values.push(Self::block_to_value(block), pool);
75 values.extend(args.iter().copied(), pool);
76 Self { values }
77 }
78
79 pub fn block(&self, pool: &ValueListPool) -> Block {
81 let val = self.values.first(pool).unwrap();
82 Self::value_to_block(val)
83 }
84
85 pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
87 *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
88 }
89
90 pub fn append_argument(&mut self, arg: Value, pool: &mut ValueListPool) {
92 self.values.push(arg, pool);
93 }
94
95 pub fn args_slice<'a>(&self, pool: &'a ValueListPool) -> &'a [Value] {
97 &self.values.as_slice(pool)[1..]
98 }
99
100 pub fn args_slice_mut<'a>(&'a mut self, pool: &'a mut ValueListPool) -> &'a mut [Value] {
102 &mut self.values.as_mut_slice(pool)[1..]
103 }
104
105 pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
107 self.values.remove(1 + ix, pool)
108 }
109
110 pub fn clear(&mut self, pool: &mut ValueListPool) {
112 self.values.truncate(1, pool)
113 }
114
115 pub fn extend<I>(&mut self, elements: I, pool: &mut ValueListPool)
117 where
118 I: IntoIterator<Item = Value>,
119 {
120 self.values.extend(elements, pool)
121 }
122
123 pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
125 DisplayBlockCall { block: *self, pool }
126 }
127
128 pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
132 Self {
133 values: self.values.deep_clone(pool),
134 }
135 }
136}
137
/// Adapter that renders a `BlockCall` as `blockN(arg0, arg1, ...)`.
pub struct DisplayBlockCall<'a> {
    // The call being displayed (cheap `Copy`).
    block: BlockCall,
    // Pool holding the call's value list.
    pool: &'a ValueListPool,
}
143
144impl<'a> Display for DisplayBlockCall<'a> {
145 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
146 write!(f, "{}", self.block.block(&self.pool))?;
147 let args = self.block.args_slice(&self.pool);
148 if !args.is_empty() {
149 write!(f, "(")?;
150 for (ix, arg) in args.iter().enumerate() {
151 if ix > 0 {
152 write!(f, ", ")?;
153 }
154 write!(f, "{arg}")?;
155 }
156 write!(f, ")")?;
157 }
158 Ok(())
159 }
160}
161
162include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));
178
179impl Display for Opcode {
180 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
181 write!(f, "{}", opcode_name(*self))
182 }
183}
184
impl Opcode {
    /// Get the instruction format for this opcode.
    pub fn format(self) -> InstructionFormat {
        // Generated tables are indexed by `opcode - 1`: discriminant 0 appears
        // to be unused (the tests check size_of::<Opcode>() ==
        // size_of::<Option<Opcode>>(), i.e. a niche) — TODO confirm against
        // the generated opcodes.rs.
        OPCODE_FORMAT[self as usize - 1]
    }

    /// Get the type/operand constraint descriptor for this opcode.
    pub fn constraints(self) -> OpcodeConstraints {
        OPCODE_CONSTRAINTS[self as usize - 1]
    }

    /// Returns true if the instruction is a safepoint — as implemented here,
    /// exactly the call instructions that are not also returns.
    #[inline]
    pub fn is_safepoint(self) -> bool {
        self.is_call() && !self.is_return()
    }
}
205
206impl FromStr for Opcode {
211 type Err = &'static str;
212
213 fn from_str(s: &str) -> Result<Self, &'static str> {
215 use crate::constant_hash::{probe, simple_hash};
216
217 match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
218 Err(_) => Err("Unknown opcode"),
219 Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
222 }
223 }
224}
225
// Adapter letting `constant_hash::probe` search OPCODE_HASH_TABLE by name.
impl<'a> Table<&'a str> for [Option<Opcode>] {
    fn len(&self) -> usize {
        // Resolves to the inherent `<[T]>::len` (inherent methods win over
        // trait methods), so this is not infinite recursion.
        self.len()
    }

    fn key(&self, idx: usize) -> Option<&'a str> {
        // Empty hash-table slots are `None` and have no key.
        self[idx].map(opcode_name)
    }
}
235
/// A growable collection of `Value` operands, used while assembling the
/// variable-argument portion of an instruction before it is interned into a
/// pooled `ValueList` (see `into_value_list`).
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);
240
241impl VariableArgs {
242 pub fn new() -> Self {
244 Self(Vec::new())
245 }
246
247 pub fn push(&mut self, v: Value) {
249 self.0.push(v)
250 }
251
252 pub fn is_empty(&self) -> bool {
254 self.0.is_empty()
255 }
256
257 pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
259 let mut vlist = ValueList::default();
260 vlist.extend(fixed.iter().cloned(), pool);
261 vlist.extend(self.0, pool);
262 vlist
263 }
264}
265
266impl Deref for VariableArgs {
268 type Target = [Value];
269
270 fn deref(&self) -> &[Value] {
271 &self.0
272 }
273}
274
275impl DerefMut for VariableArgs {
276 fn deref_mut(&mut self) -> &mut [Value] {
277 &mut self.0
278 }
279}
280
281impl Display for VariableArgs {
282 fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
283 for (i, val) in self.0.iter().enumerate() {
284 if i == 0 {
285 write!(fmt, "{val}")?;
286 } else {
287 write!(fmt, ", {val}")?;
288 }
289 }
290 Ok(())
291 }
292}
293
294impl Default for VariableArgs {
295 fn default() -> Self {
296 Self::new()
297 }
298}
299
300impl InstructionData {
305 pub fn branch_destination<'a>(&'a self, jump_tables: &'a ir::JumpTables) -> &'a [BlockCall] {
309 match self {
310 Self::Jump { destination, .. } => std::slice::from_ref(destination),
311 Self::Brif { blocks, .. } => blocks.as_slice(),
312 Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
313 _ => {
314 debug_assert!(!self.opcode().is_branch());
315 &[]
316 }
317 }
318 }
319
320 pub fn branch_destination_mut<'a>(
324 &'a mut self,
325 jump_tables: &'a mut ir::JumpTables,
326 ) -> &'a mut [BlockCall] {
327 match self {
328 Self::Jump { destination, .. } => std::slice::from_mut(destination),
329 Self::Brif { blocks, .. } => blocks.as_mut_slice(),
330 Self::BranchTable { table, .. } => {
331 jump_tables.get_mut(*table).unwrap().all_branches_mut()
332 }
333 _ => {
334 debug_assert!(!self.opcode().is_branch());
335 &mut []
336 }
337 }
338 }
339
340 pub fn map_values(
343 &mut self,
344 pool: &mut ValueListPool,
345 jump_tables: &mut ir::JumpTables,
346 mut f: impl FnMut(Value) -> Value,
347 ) {
348 for arg in self.arguments_mut(pool) {
349 *arg = f(*arg);
350 }
351
352 for block in self.branch_destination_mut(jump_tables) {
353 for arg in block.args_slice_mut(pool) {
354 *arg = f(*arg);
355 }
356 }
357 }
358
359 pub fn trap_code(&self) -> Option<TrapCode> {
362 match *self {
363 Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
364 _ => None,
365 }
366 }
367
368 pub fn cond_code(&self) -> Option<IntCC> {
371 match self {
372 &InstructionData::IntCompare { cond, .. }
373 | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
374 _ => None,
375 }
376 }
377
378 pub fn fp_cond_code(&self) -> Option<FloatCC> {
381 match self {
382 &InstructionData::FloatCompare { cond, .. } => Some(cond),
383 _ => None,
384 }
385 }
386
387 pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
390 match self {
391 Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
392 _ => None,
393 }
394 }
395
396 pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
398 match self {
399 &InstructionData::AtomicRmw { op, .. } => Some(op),
400 _ => None,
401 }
402 }
403
404 pub fn load_store_offset(&self) -> Option<i32> {
406 match self {
407 &InstructionData::Load { offset, .. }
408 | &InstructionData::StackLoad { offset, .. }
409 | &InstructionData::Store { offset, .. }
410 | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
411 _ => None,
412 }
413 }
414
415 pub fn memflags(&self) -> Option<MemFlags> {
417 match self {
418 &InstructionData::Load { flags, .. }
419 | &InstructionData::LoadNoOffset { flags, .. }
420 | &InstructionData::Store { flags, .. }
421 | &InstructionData::StoreNoOffset { flags, .. }
422 | &InstructionData::AtomicCas { flags, .. }
423 | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
424 _ => None,
425 }
426 }
427
428 pub fn stack_slot(&self) -> Option<StackSlot> {
430 match self {
431 &InstructionData::StackStore { stack_slot, .. }
432 | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
433 _ => None,
434 }
435 }
436
437 pub fn analyze_call<'a>(&'a self, pool: &'a ValueListPool) -> CallInfo<'a> {
441 match *self {
442 Self::Call {
443 func_ref, ref args, ..
444 } => CallInfo::Direct(func_ref, args.as_slice(pool)),
445 Self::CallIndirect {
446 sig_ref, ref args, ..
447 } => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
448 Self::Ternary {
449 opcode: Opcode::StackSwitch,
450 ..
451 } => {
452 CallInfo::NotACall
455 }
456 _ => {
457 debug_assert!(!self.opcode().is_call());
458 CallInfo::NotACall
459 }
460 }
461 }
462
463 #[inline]
464 pub(crate) fn mask_immediates(&mut self, ctrl_typevar: Type) {
465 if ctrl_typevar.is_invalid() {
466 return;
467 }
468
469 let bit_width = ctrl_typevar.bits();
470
471 match self {
472 Self::UnaryImm { opcode: _, imm } => {
473 *imm = imm.mask_to_width(bit_width);
474 }
475 Self::BinaryImm64 {
476 opcode,
477 arg: _,
478 imm,
479 } => {
480 if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
481 *imm = imm.mask_to_width(bit_width);
482 }
483 }
484 Self::IntCompareImm {
485 opcode,
486 arg: _,
487 cond,
488 imm,
489 } => {
490 debug_assert_eq!(*opcode, Opcode::IcmpImm);
491 if cond.unsigned() != *cond {
492 *imm = imm.mask_to_width(bit_width);
493 }
494 }
495 _ => {}
496 }
497 }
498}
499
/// Information about a call instruction, as reported by
/// `InstructionData::analyze_call`.
pub enum CallInfo<'a> {
    /// The instruction is not a call.
    NotACall,

    /// Direct call: the referenced function plus the full argument slice.
    Direct(FuncRef, &'a [Value]),

    /// Indirect call: the signature reference plus the arguments — the
    /// callee operand itself is not included in the slice (see
    /// `analyze_call`, which slices off the first list entry).
    Indirect(SigRef, &'a [Value]),
}
512
/// Value-type constraints for one opcode, bit-packed into four bytes.
/// Entries live in the generated `OPCODE_CONSTRAINTS` table and are decoded
/// by the methods on this type.
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    // Packed flags, decoded by the accessors below:
    //   bits 0-2: number of fixed results,
    //   bit 3:    use_typevar_operand,
    //   bit 4:    requires_typevar_operand,
    //   bits 5-7: number of fixed value arguments.
    flags: u8,

    // Index into TYPE_SETS for the controlling type variable; values past the
    // end of TYPE_SETS mean "not polymorphic" (see `typeset_offset()`).
    typeset_offset: u8,

    // Offset into OPERAND_CONSTRAINTS where this opcode's entries start:
    // result constraints first, then fixed value-argument constraints.
    constraint_offset: u16,
}
548
549impl OpcodeConstraints {
550 pub fn use_typevar_operand(self) -> bool {
554 (self.flags & 0x8) != 0
555 }
556
557 pub fn requires_typevar_operand(self) -> bool {
564 (self.flags & 0x10) != 0
565 }
566
567 pub fn num_fixed_results(self) -> usize {
570 (self.flags & 0x7) as usize
571 }
572
573 pub fn num_fixed_value_arguments(self) -> usize {
581 ((self.flags >> 5) & 0x7) as usize
582 }
583
584 fn typeset_offset(self) -> Option<usize> {
587 let offset = usize::from(self.typeset_offset);
588 if offset < TYPE_SETS.len() {
589 Some(offset)
590 } else {
591 None
592 }
593 }
594
595 fn constraint_offset(self) -> usize {
597 self.constraint_offset as usize
598 }
599
600 pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
603 debug_assert!(n < self.num_fixed_results(), "Invalid result index");
604 match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
605 ResolvedConstraint::Bound(t) => t,
606 ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {ts:?}"),
607 }
608 }
609
610 pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
616 debug_assert!(
617 n < self.num_fixed_value_arguments(),
618 "Invalid value argument index"
619 );
620 let offset = self.constraint_offset() + self.num_fixed_results();
621 OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
622 }
623
624 pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
627 self.typeset_offset().map(|offset| TYPE_SETS[offset])
628 }
629
630 pub fn is_polymorphic(self) -> bool {
632 self.ctrl_typeset().is_some()
633 }
634}
635
// Small bit sets indexed by log2 quantities (see `ValueTypeSet`).
type BitSet8 = ScalarBitSet<u8>;
type BitSet16 = ScalarBitSet<u16>;
638
/// A value type set describes the permitted set of types for a type variable.
///
/// Each bit set is indexed by the log2 of the quantity it describes, as seen
/// in `is_base_type` (log2 of lane bits) and `contains` (log2 of lane count).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed lane counts, indexed by log2(lane count).
    pub lanes: BitSet16,
    /// Allowed integer lane widths, indexed by log2(bits).
    pub ints: BitSet8,
    /// Allowed floating-point lane widths, indexed by log2(bits).
    pub floats: BitSet8,
    /// Allowed minimum lane counts for dynamic vectors, indexed by
    /// log2(min lane count).
    pub dynamic_lanes: BitSet16,
}
651
652impl ValueTypeSet {
653 fn is_base_type(self, scalar: Type) -> bool {
657 let l2b = u8::try_from(scalar.log2_lane_bits()).unwrap();
658 if scalar.is_int() {
659 self.ints.contains(l2b)
660 } else if scalar.is_float() {
661 self.floats.contains(l2b)
662 } else {
663 false
664 }
665 }
666
667 pub fn contains(self, typ: Type) -> bool {
669 if typ.is_dynamic_vector() {
670 let l2l = u8::try_from(typ.log2_min_lane_count()).unwrap();
671 self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
672 } else {
673 let l2l = u8::try_from(typ.log2_lane_count()).unwrap();
674 self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
675 }
676 }
677
678 pub fn example(self) -> Type {
682 let t = if self.ints.max().unwrap_or(0) > 5 {
683 types::I32
684 } else if self.floats.max().unwrap_or(0) > 5 {
685 types::F32
686 } else {
687 types::I8
688 };
689 t.by(1 << self.lanes.min().unwrap()).unwrap()
690 }
691}
692
/// Operand constraints: how an operand's type is derived from the
/// controlling type variable (see `resolve` below for each rule).
enum OperandConstraint {
    /// The operand has this concrete type.
    Concrete(Type),

    /// The operand may be any type in a set; the payload indexes TYPE_SETS.
    Free(u8),

    /// Same type as the controlling type variable.
    Same,

    /// The lane type of the controlling type (`Type::lane_of`).
    LaneOf,

    /// The "truthy" form of the controlling type (`Type::as_truthy`).
    AsTruthy,

    /// The controlling type at half its bit width (`Type::half_width`).
    HalfWidth,

    /// The controlling type at double its bit width (`Type::double_width`).
    DoubleWidth,

    /// The controlling type with lanes split (`Type::split_lanes`).
    SplitLanes,

    /// The controlling type with lanes merged (`Type::merge_lanes`).
    MergeLanes,

    /// The fixed-vector form of a dynamic-vector controlling type
    /// (`Type::dynamic_to_vector`).
    DynamicToVector,

    /// A free scalar type strictly narrower than the controlling type.
    Narrower,

    /// A free scalar type strictly wider than the controlling type.
    Wider,
}
732
impl OperandConstraint {
    /// Resolve this constraint into a concrete type or a free type set,
    /// given the value of the controlling type variable.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            // The payload indexes the generated TYPE_SETS table.
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                // Dynamic vectors round-trip through their fixed-vector form
                // so `split_lanes` can be applied.
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                // Same fixed-vector round-trip as SplitLanes above.
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Only scalars: log2(lane count) = 0, i.e. one lane.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    // log2(bits) in [3, ctrl): i8 (log2(8) = 3) up to, but
                    // excluding, the control type's own width — the
                    // `from_range` upper bound appears exclusive here.
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // Same, with f16 (log2(16) = 4) as the narrowest float.
                    tys.floats = BitSet8::from_range(4, ctrl_type_bits as u8);
                } else {
                    panic!("The Narrower constraint only operates on floats or ints");
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Only scalars: log2(lane count) = 0, i.e. one lane.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // Guard against overflowing the 8-slot bit set when the
                    // control type is already the widest representable; an
                    // out-of-range lower bound leaves the set empty.
                    if lower_bound < BitSet8::capacity() {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    if lower_bound < BitSet8::capacity() {
                        tys.floats = BitSet8::from_range(lower_bound, 8);
                    }
                } else {
                    panic!("The Wider constraint only operates on floats or ints");
                }

                ResolvedConstraint::Free(tys)
            }
        }
    }
}
849
/// The result of resolving an `OperandConstraint` against a controlling type.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand is bound to exactly this type.
    Bound(Type),
    /// The operand may be any type in this set.
    Free(ValueTypeSet),
}
858
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;

    // `InstructionData` must stay `Copy` so instructions can be duplicated
    // cheaply.
    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

    // Guard against accidental growth of the instruction representation.
    #[test]
    fn inst_data_size() {
        assert_eq!(std::mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn opcodes() {
        use core::mem;

        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;

        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);

        // Debug shows the variant name; Display shows the IR text name.
        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");

        // FromStr round-trips, and embedded NULs / empty strings are rejected.
        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));

        // Opcode leaves a niche (no zero discriminant), so Option<Opcode>
        // costs nothing extra.
        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    #[test]
    fn instruction_data() {
        use core::mem;
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );

        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }

        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);

        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);

        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
        assert_eq!(cmp.result_type(0, types::I64), types::I8);
    }

    #[test]
    fn value_set() {
        use crate::ir::types::*;

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F16));
        assert!(!vts.contains(F32));
        assert!(!vts.contains(F128));
        assert_eq!(vts.example().to_string(), "i32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
    }
}