mod code_builder;
pub(crate) mod encode;
mod terminate;
use crate::{arbitrary_loop, limited_string, unique_string, Config};
use arbitrary::{Arbitrary, Result, Unstructured};
use code_builder::CodeBuilderAllocations;
use flagset::{flags, FlagSet};
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::mem;
use std::ops::Range;
use std::rc::Rc;
use std::str::{self, FromStr};
use wasm_encoder::{
AbstractHeapType, ArrayType, BlockType, ConstExpr, ExportKind, FieldType, HeapType, RefType,
StorageType, StructType, ValType,
};
pub(crate) use wasm_encoder::{GlobalType, MemoryType, TableType};
// Reciprocal odds (1-in-N) used when generating segments; presumably they bias
// offsets/segments toward in-bounds configurations — TODO confirm at use sites.
const CHANCE_OFFSET_INBOUNDS: usize = 10;
const CHANCE_SEGMENT_ON_EMPTY: usize = 10;
// Fraction of generated offsets kept in bounds — TODO confirm at use sites.
const PCT_INBOUNDS: f64 = 0.995;

/// Shorthand for an owned (`'static`) `wasm_encoder` instruction.
type Instruction = wasm_encoder::Instruction<'static>;
/// A pseudo-randomly generated WebAssembly module, built up incrementally from
/// a stream of arbitrary input bytes.
pub struct Module {
// Configuration governing what this generator may emit.
config: Config,
// Whether two imports may share the same module/field name pair.
duplicate_imports_behavior: DuplicateImportsBehavior,
// Pool of value types that generation may draw from.
valtypes: Vec<ValType>,
// All types in this module's type section, in index order.
types: Vec<SubType>,
// Ranges into `types`, one per recursion group.
rec_groups: Vec<Range<usize>>,
// Maps a supertype's index to the indices of its declared subtypes.
super_to_sub_types: HashMap<u32, Vec<u32>>,
// Indices of non-final types, i.e. types that may still be subtyped.
can_subtype: Vec<u32>,
// Whether an explicit type section should be emitted (set during `build`).
should_encode_types: bool,
// When set, only shared types may be chosen (see `arbitrary_heap_type`).
must_share: bool,
// All imports, in the order they will appear in the import section.
imports: Vec<Import>,
// Whether an import section should be emitted (set during `build`).
should_encode_imports: bool,
// Type-section indices of array types.
array_types: Vec<u32>,
// Type-section indices of function types.
func_types: Vec<u32>,
// Type-section indices of struct types.
struct_types: Vec<u32>,
// Number of entries in `imports`.
num_imports: usize,
// Number of tags defined locally (i.e. not imported).
num_defined_tags: usize,
// Number of functions defined locally (i.e. not imported).
num_defined_funcs: usize,
// One entry per locally-defined table; presumably the table's optional
// init expression — TODO confirm against the encoding code.
defined_tables: Vec<Option<ConstExpr>>,
// Number of memories defined locally (i.e. not imported).
num_defined_memories: usize,
// Locally-defined globals; presumably (global index, initializer) pairs —
// TODO confirm against the encoding code.
defined_globals: Vec<(u32, ConstExpr)>,
// All tags (imported first, then defined), indexed by tag index.
tags: Vec<TagType>,
// All functions (imported first, then defined): (type index, signature).
funcs: Vec<(u32, Rc<FuncType>)>,
// All tables (imported first, then defined).
tables: Vec<TableType>,
// All globals (imported first, then defined).
globals: Vec<GlobalType>,
// All memories (imported first, then defined).
memories: Vec<MemoryType>,
// Export entries: (name, kind, index into that kind's index space).
exports: Vec<(String, ExportKind, u32)>,
// Optional start function index.
start: Option<u32>,
// Element segments.
elems: Vec<ElementSegment>,
// Function bodies, one per locally-defined function.
code: Vec<Code>,
// Data segments.
data: Vec<DataSegment>,
// Running "size" of all entity types generated so far, bounded by
// `config.max_type_size`.
type_size: u32,
// Export names already used, so exports stay unique.
export_names: HashSet<String>,
// Extra generators producing a constant expression of a requested type.
const_expr_choices: Vec<Box<dyn Fn(&mut Unstructured, ValType) -> Result<ConstExpr>>>,
// Upper bound on which concrete type indices may currently be referenced.
max_type_limit: MaxTypeLimit,
// Pool of "interesting" 32-bit constants; presumably used while generating
// instructions — TODO confirm.
interesting_values32: Vec<u32>,
// Pool of "interesting" 64-bit constants; presumably used while generating
// instructions — TODO confirm.
interesting_values64: Vec<u64>,
}
impl<'a> Arbitrary<'a> for Module {
    /// Generates a module using the default configuration.
    fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
        Self::new(Config::default(), u)
    }
}
impl fmt::Debug for Module {
// Debug output shows only the configuration; the full module contents are
// far too large to be useful in debug logs.
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Module")
.field("config", &self.config)
.field(&"...", &"...")
.finish()
}
}
/// Whether two imports may share the same `module`/`field` name pair.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum DuplicateImportsBehavior {
Allowed,
Disallowed,
}
/// Whether a generated recursion group may contain zero types.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AllowEmptyRecGroup {
Yes,
No,
}
/// Upper bound on the concrete type indices that may currently be referenced.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MaxTypeLimit {
// Any type already defined in the module may be referenced.
ModuleTypes,
// Only type indices strictly below this number may be referenced.
Num(u32),
}
impl Module {
/// Returns the configuration used to generate this module.
pub fn config(&self) -> &Config {
&self.config
}
/// Generates a new module from `u`, allowing duplicate import names.
pub fn new(config: Config, u: &mut Unstructured<'_>) -> Result<Self> {
Self::new_internal(config, u, DuplicateImportsBehavior::Allowed)
}
/// Generates a new module from `u` with explicit control over whether
/// duplicate import name pairs are allowed.
pub(crate) fn new_internal(
config: Config,
u: &mut Unstructured<'_>,
duplicate_imports_behavior: DuplicateImportsBehavior,
) -> Result<Self> {
let mut module = Module::empty(config, duplicate_imports_behavior);
module.build(u)?;
Ok(module)
}
/// Creates an empty module with the given (sanitized) configuration and
/// all index spaces empty.
fn empty(mut config: Config, duplicate_imports_behavior: DuplicateImportsBehavior) -> Self {
config.sanitize();
Module {
config,
duplicate_imports_behavior,
valtypes: Vec::new(),
types: Vec::new(),
rec_groups: Vec::new(),
can_subtype: Vec::new(),
super_to_sub_types: HashMap::new(),
should_encode_types: false,
imports: Vec::new(),
should_encode_imports: false,
array_types: Vec::new(),
func_types: Vec::new(),
struct_types: Vec::new(),
num_imports: 0,
num_defined_tags: 0,
num_defined_funcs: 0,
defined_tables: Vec::new(),
num_defined_memories: 0,
defined_globals: Vec::new(),
tags: Vec::new(),
funcs: Vec::new(),
tables: Vec::new(),
globals: Vec::new(),
memories: Vec::new(),
exports: Vec::new(),
start: None,
elems: Vec::new(),
code: Vec::new(),
data: Vec::new(),
type_size: 0,
export_names: HashSet::new(),
const_expr_choices: Vec::new(),
max_type_limit: MaxTypeLimit::ModuleTypes,
interesting_values32: Vec::new(),
interesting_values64: Vec::new(),
must_share: false,
}
}
}
/// A type definition plus its subtyping information: finality and an
/// optional single supertype.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct SubType {
// A final type cannot be subtyped further (see `add_type`).
pub(crate) is_final: bool,
// Index of this type's supertype, if any.
pub(crate) supertype: Option<u32>,
pub(crate) composite_type: CompositeType,
}
impl SubType {
    /// Returns the inner function type; panics if this is not a func type.
    fn unwrap_func(&self) -> &Rc<FuncType> {
        self.composite_type.unwrap_func()
    }

    /// Returns the inner array type; panics if this is not an array type.
    fn unwrap_array(&self) -> &ArrayType {
        self.composite_type.unwrap_array()
    }

    /// Returns the inner struct type; panics if this is not a struct type.
    fn unwrap_struct(&self) -> &StructType {
        self.composite_type.unwrap_struct()
    }
}
/// A composite type (array, func, or struct) plus whether it is `shared`
/// (shared-everything-threads proposal).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct CompositeType {
pub inner: CompositeInnerType,
pub shared: bool,
}
impl CompositeType {
    /// Constructs a (possibly shared) function composite type.
    #[cfg(any(feature = "component-model", feature = "wasmparser"))]
    pub(crate) fn new_func(func: Rc<FuncType>, shared: bool) -> Self {
        CompositeType {
            shared,
            inner: CompositeInnerType::Func(func),
        }
    }

    /// Returns the inner function type; panics if this is not a func type.
    fn unwrap_func(&self) -> &Rc<FuncType> {
        if let CompositeInnerType::Func(f) = &self.inner {
            f
        } else {
            panic!("not a func")
        }
    }

    /// Returns the inner array type; panics if this is not an array type.
    fn unwrap_array(&self) -> &ArrayType {
        if let CompositeInnerType::Array(a) = &self.inner {
            a
        } else {
            panic!("not an array")
        }
    }

    /// Returns the inner struct type; panics if this is not a struct type.
    fn unwrap_struct(&self) -> &StructType {
        if let CompositeInnerType::Struct(s) = &self.inner {
            s
        } else {
            panic!("not a struct")
        }
    }
}
impl From<&CompositeType> for wasm_encoder::CompositeType {
    /// Lowers our internal composite type into its `wasm_encoder` equivalent.
    fn from(ty: &CompositeType) -> Self {
        let shared = ty.shared;
        let inner = match &ty.inner {
            CompositeInnerType::Func(f) => {
                let func =
                    wasm_encoder::FuncType::new(f.params.iter().cloned(), f.results.iter().cloned());
                wasm_encoder::CompositeInnerType::Func(func)
            }
            CompositeInnerType::Struct(s) => wasm_encoder::CompositeInnerType::Struct(s.clone()),
            CompositeInnerType::Array(a) => wasm_encoder::CompositeInnerType::Array(*a),
        };
        Self { shared, inner }
    }
}
/// The three kinds of composite types from the GC proposal.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum CompositeInnerType {
Array(ArrayType),
Func(Rc<FuncType>),
Struct(StructType),
}
/// A function signature: parameter and result value types.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub(crate) struct FuncType {
pub(crate) params: Vec<ValType>,
pub(crate) results: Vec<ValType>,
}
/// A single import entry: module/field names plus the imported entity.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct Import {
pub(crate) module: String,
pub(crate) field: String,
pub(crate) entity_type: EntityType,
}
/// The type of an importable/exportable entity.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum EntityType {
Global(GlobalType),
Table(TableType),
Memory(MemoryType),
Tag(TagType),
// A function's type-section index together with the signature itself.
Func(u32, Rc<FuncType>),
}
/// An exception-handling tag: its function type index and signature.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct TagType {
func_type_idx: u32,
func_type: Rc<FuncType>,
}
/// An element segment: placement kind, element type, and items.
#[derive(Debug)]
struct ElementSegment {
kind: ElementKind,
ty: RefType,
items: Elements,
}
/// How an element segment is placed.
#[derive(Debug)]
enum ElementKind {
Passive,
Declared,
// Copied into a table at `offset` on instantiation; `table: None`
// presumably means table index zero — TODO confirm in the encoder.
Active {
table: Option<u32>, offset: Offset,
},
}
/// The items of an element segment.
#[derive(Debug)]
enum Elements {
// Plain function indices.
Functions(Vec<u32>),
// Arbitrary constant expressions.
Expressions(Vec<ConstExpr>),
}
/// A function body: local declarations plus instructions.
#[derive(Debug)]
struct Code {
locals: Vec<ValType>,
instructions: Instructions,
}
/// The instructions of a function body.
#[derive(Debug)]
enum Instructions {
// A generated, well-formed instruction sequence.
Generated(Vec<Instruction>),
// Raw, possibly-invalid bytes.
Arbitrary(Vec<u8>),
}
/// A data segment: placement kind plus raw bytes.
#[derive(Debug)]
struct DataSegment {
kind: DataSegmentKind,
init: Vec<u8>,
}
/// How a data segment is placed.
#[derive(Debug)]
enum DataSegmentKind {
Passive,
Active { memory_index: u32, offset: Offset },
}
/// The offset expression of an active element/data segment.
#[derive(Debug)]
pub(crate) enum Offset {
Const32(i32),
Const64(i64),
Global(u32),
}
impl Module {
/// Drives generation of the entire module, roughly in binary section order.
fn build(&mut self, u: &mut Unstructured) -> Result<()> {
self.valtypes = configured_valtypes(&self.config);
// When imports are copied from a user-provided example module, that step
// creates types itself, so the order of these two phases flips.
if self.arbitrary_imports_from_available(u)? {
self.arbitrary_types(u)?;
} else {
self.arbitrary_types(u)?;
self.arbitrary_imports(u)?;
}
// Emit the import section whenever we have imports, and occasionally
// even when empty.
self.should_encode_imports = !self.imports.is_empty() || u.arbitrary()?;
self.arbitrary_tags(u)?;
self.arbitrary_funcs(u)?;
self.arbitrary_tables(u)?;
self.arbitrary_memories(u)?;
self.arbitrary_globals(u)?;
if !self.required_exports(u)? {
self.arbitrary_exports(u)?;
};
// Same "sometimes emit an empty section" trick for the type section.
self.should_encode_types = !self.types.is_empty() || u.arbitrary()?;
self.arbitrary_start(u)?;
self.arbitrary_elems(u)?;
self.arbitrary_data(u)?;
self.arbitrary_code(u)?;
Ok(())
}
#[inline]
fn val_type_is_sub_type(&self, a: ValType, b: ValType) -> bool {
    // Identical value types are trivially related; otherwise only reference
    // types have a non-trivial subtype relationship.
    if a == b {
        return true;
    }
    match (a, b) {
        (ValType::Ref(a), ValType::Ref(b)) => self.ref_type_is_sub_type(a, b),
        _ => false,
    }
}
fn ref_type_is_sub_type(&self, a: RefType, b: RefType) -> bool {
    // Equal ref types are trivially related. Otherwise a nullable ref can
    // only be a subtype of another nullable ref, and the heap types must be
    // related too.
    a == b
        || ((b.nullable || !a.nullable)
            && self.heap_type_is_sub_type(a.heap_type, b.heap_type))
}
/// Returns whether heap type `a` is a subtype of heap type `b`.
fn heap_type_is_sub_type(&self, a: HeapType, b: HeapType) -> bool {
use AbstractHeapType::*;
use CompositeInnerType as CT;
use HeapType as HT;
match (a, b) {
(a, b) if a == b => true,
(
HT::Abstract {
shared: a_shared,
ty: a_ty,
},
HT::Abstract {
shared: b_shared,
ty: b_ty,
},
) => {
// Abstract-to-abstract subtyping never crosses shared-ness and
// otherwise follows the GC proposal's abstract hierarchies.
a_shared == b_shared
&& match (a_ty, b_ty) {
(Eq | I31 | Struct | Array | None, Any) => true,
(I31 | Struct | Array | None, Eq) => true,
(NoExtern, Extern) => true,
(NoFunc, Func) => true,
(None, I31 | Array | Struct) => true,
(NoExn, Exn) => true,
_ => false,
}
}
(HT::Concrete(a), HT::Abstract { shared, ty }) => {
// A concrete type sits below the abstract supertypes of its own
// composite kind (structs/arrays below eq/any, funcs below func).
let a_ty = &self.ty(a).composite_type;
if a_ty.shared != shared {
return false;
}
match ty {
Eq | Any => matches!(a_ty.inner, CT::Array(_) | CT::Struct(_)),
Struct => matches!(a_ty.inner, CT::Struct(_)),
Array => matches!(a_ty.inner, CT::Array(_)),
Func => matches!(a_ty.inner, CT::Func(_)),
_ => false,
}
}
(HT::Abstract { shared, ty }, HT::Concrete(b)) => {
// Only the bottom types (`none`/`nofunc`) sit below concrete types.
let b_ty = &self.ty(b).composite_type;
if shared != b_ty.shared {
return false;
}
match ty {
None => matches!(b_ty.inner, CT::Array(_) | CT::Struct(_)),
NoFunc => matches!(b_ty.inner, CT::Func(_)),
_ => false,
}
}
(HT::Concrete(mut a), HT::Concrete(b)) => loop {
// Walk `a`'s declared supertype chain looking for `b`.
if a == b {
return true;
}
if let Some(supertype) = self.ty(a).supertype {
a = supertype;
} else {
return false;
}
},
}
}
fn arbitrary_types(&mut self, u: &mut Unstructured) -> Result<()> {
    assert!(self.config.min_types <= self.config.max_types);
    // Always satisfy the configured minimum number of types.
    while self.types.len() < self.config.min_types {
        self.arbitrary_rec_group(u, AllowEmptyRecGroup::No)?;
    }
    // Then probabilistically keep adding rec groups up to the maximum;
    // running out of input bytes simply stops generation.
    while self.types.len() < self.config.max_types {
        if !u.arbitrary().unwrap_or(false) {
            break;
        }
        self.arbitrary_rec_group(u, AllowEmptyRecGroup::Yes)?;
    }
    Ok(())
}
/// Registers `ty` as the next type in the module's type index space,
/// updating the per-kind index lists and subtype bookkeeping, and returns
/// the new index.
fn add_type(&mut self, ty: SubType) -> u32 {
let index = u32::try_from(self.types.len()).unwrap();
if let Some(supertype) = ty.supertype {
// A subtype must agree with its supertype on shared-ness.
assert_eq!(self.is_shared_type(supertype), ty.composite_type.shared);
self.super_to_sub_types
.entry(supertype)
.or_default()
.push(index);
}
let list = match &ty.composite_type.inner {
CompositeInnerType::Array(_) => &mut self.array_types,
CompositeInnerType::Func(_) => &mut self.func_types,
CompositeInnerType::Struct(_) => &mut self.struct_types,
};
list.push(index);
// Non-final types remain available as supertypes for later types.
if !ty.is_final {
self.can_subtype.push(index);
}
self.types.push(ty);
index
}
/// Generates one recursion group's worth of types.
fn arbitrary_rec_group(
&mut self,
u: &mut Unstructured,
kind: AllowEmptyRecGroup,
) -> Result<()> {
let rec_group_start = self.types.len();
// Callers must not be inside another rec group's type-reference window.
assert!(matches!(self.max_type_limit, MaxTypeLimit::ModuleTypes));
if self.config.gc_enabled {
// Rarely, clone an existing rec group instead of inventing new types.
if self.clonable_rec_groups(kind).next().is_some() && u.ratio(1, u8::MAX)? {
return self.clone_rec_group(u, kind);
}
let max_rec_group_size = self.config.max_types - self.types.len();
let min_rec_group_size = match kind {
AllowEmptyRecGroup::Yes => 0,
AllowEmptyRecGroup::No => 1,
};
let rec_group_size = u.int_in_range(min_rec_group_size..=max_rec_group_size)?;
// Types within a rec group may reference later members of the same
// group, so the reference limit spans the whole group.
let type_ref_limit = u32::try_from(self.types.len() + rec_group_size).unwrap();
self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
for _ in 0..rec_group_size {
let ty = self.arbitrary_sub_type(u)?;
self.add_type(ty);
}
} else {
// Without GC there are no multi-type rec groups: emit a single type
// that may only reference already-defined types.
let type_ref_limit = u32::try_from(self.types.len()).unwrap();
self.max_type_limit = MaxTypeLimit::Num(type_ref_limit);
let ty = self.arbitrary_sub_type(u)?;
self.add_type(ty);
}
self.max_type_limit = MaxTypeLimit::ModuleTypes;
self.rec_groups.push(rec_group_start..self.types.len());
Ok(())
}
/// Iterates over the existing rec groups that could be cloned without
/// exceeding `max_types` (and, when `kind` forbids it, skipping empty
/// groups).
fn clonable_rec_groups(
    &self,
    kind: AllowEmptyRecGroup,
) -> impl Iterator<Item = Range<usize>> + '_ {
    // How many more types we could still add.
    let room = self.config.max_types.saturating_sub(self.types.len());
    self.rec_groups
        .iter()
        .filter(move |r| {
            if kind == AllowEmptyRecGroup::No && r.is_empty() {
                return false;
            }
            r.len() <= room
        })
        .cloned()
}
/// Duplicates one of the existing clonable rec groups, appending copies of
/// its types to the end of the type index space as a fresh group.
fn clone_rec_group(&mut self, u: &mut Unstructured, kind: AllowEmptyRecGroup) -> Result<()> {
    let candidates: Vec<_> = self.clonable_rec_groups(kind).collect();
    let chosen = u.choose(&candidates)?.clone();
    let start = self.types.len();
    for idx in chosen {
        let ty = self.ty(u32::try_from(idx).unwrap()).clone();
        self.add_type(ty);
    }
    self.rec_groups.push(start..self.types.len());
    Ok(())
}
/// Generates a single type definition, possibly as a subtype of an
/// existing one.
fn arbitrary_sub_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
// Without GC, every type is a final function type with no supertype.
if !self.config.gc_enabled {
let shared = self.arbitrary_shared(u)?;
let func_type = self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?;
let composite_type = CompositeType {
inner: CompositeInnerType::Func(func_type),
shared,
};
return Ok(SubType {
is_final: true,
supertype: None,
composite_type,
});
}
// Occasionally derive the new type from an existing non-final type.
if !self.can_subtype.is_empty() && u.ratio(1, 32_u8)? {
self.arbitrary_sub_type_of_super_type(u)
} else {
Ok(SubType {
is_final: u.arbitrary()?,
supertype: None,
composite_type: self.arbitrary_composite_type(u)?,
})
}
}
/// Picks an existing non-final type and generates a valid subtype of it.
fn arbitrary_sub_type_of_super_type(&mut self, u: &mut Unstructured) -> Result<SubType> {
let supertype = *u.choose(&self.can_subtype)?;
let mut composite_type = self.types[usize::try_from(supertype).unwrap()]
.composite_type
.clone();
// Mutate the cloned composite type into something that still matches
// (i.e. remains a valid subtype of) the original.
match &mut composite_type.inner {
CompositeInnerType::Array(a) => {
a.0 = self.arbitrary_matching_field_type(u, a.0)?;
}
CompositeInnerType::Func(f) => {
*f = self.arbitrary_matching_func_type(u, f)?;
}
CompositeInnerType::Struct(s) => {
*s = self.propagate_shared(composite_type.shared, |m| {
m.arbitrary_matching_struct_type(u, s)
})?;
}
}
Ok(SubType {
is_final: u.arbitrary()?,
supertype: Some(supertype),
composite_type,
})
}
/// Generates a struct type that matches (is a valid subtype of) `ty`:
/// every existing field is narrowed, and extra trailing fields may be
/// appended.
fn arbitrary_matching_struct_type(
    &mut self,
    u: &mut Unstructured,
    ty: &StructType,
) -> Result<StructType> {
    let extra = u.int_in_range(0..=5)?;
    let mut fields = Vec::with_capacity(ty.fields.len() + extra);
    for &field in ty.fields.iter() {
        fields.push(self.arbitrary_matching_field_type(u, field)?);
    }
    for _ in 0..extra {
        fields.push(self.arbitrary_field_type(u)?);
    }
    Ok(StructType {
        fields: fields.into(),
    })
}
/// Generates a field type that matches `ty`: the storage type is narrowed,
/// and a field may only stay mutable if the original was mutable.
fn arbitrary_matching_field_type(
    &mut self,
    u: &mut Unstructured,
    ty: FieldType,
) -> Result<FieldType> {
    let element_type = self.arbitrary_matching_storage_type(u, ty.element_type)?;
    let mutable = ty.mutable && u.arbitrary()?;
    Ok(FieldType {
        element_type,
        mutable,
    })
}
/// Generates a storage type matching `ty`. Packed types (`i8`/`i16`) only
/// match themselves; value types may narrow to a subtype.
fn arbitrary_matching_storage_type(
    &mut self,
    u: &mut Unstructured,
    ty: StorageType,
) -> Result<StorageType> {
    Ok(match ty {
        StorageType::I8 => StorageType::I8,
        StorageType::I16 => StorageType::I16,
        StorageType::Val(v) => StorageType::Val(self.arbitrary_matching_val_type(u, v)?),
    })
}
/// Generates a value type matching (a subtype of) `ty`. Only reference
/// types have non-trivial subtypes; everything else maps to itself.
fn arbitrary_matching_val_type(
    &mut self,
    u: &mut Unstructured,
    ty: ValType,
) -> Result<ValType> {
    Ok(match ty {
        ValType::Ref(r) => ValType::Ref(self.arbitrary_matching_ref_type(u, r)?),
        other => other,
    })
}
/// Generates a ref type matching `ty`: same nullability, with a heap type
/// drawn from the subtypes of the original heap type.
fn arbitrary_matching_ref_type(&self, u: &mut Unstructured, ty: RefType) -> Result<RefType> {
    let heap_type = self.arbitrary_matching_heap_type(u, ty.heap_type)?;
    Ok(RefType {
        nullable: ty.nullable,
        heap_type,
    })
}
/// Generates an arbitrary heap type that matches (is a subtype of) `ty`.
fn arbitrary_matching_heap_type(&self, u: &mut Unstructured, ty: HeapType) -> Result<HeapType> {
use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};
if !self.config.gc_enabled {
return Ok(ty);
}
// `ty` itself is always a valid choice; everything below only adds more.
let mut choices = vec![ty];
match ty {
HT::Abstract { shared, ty } => {
use AbstractHeapType::*;
// Helpers that add candidates with matching shared-ness.
let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
};
let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
choices.extend(
tys.iter()
.filter(|&&idx| shared == self.is_shared_type(idx))
.copied()
.map(HT::Concrete),
);
};
// For each abstract heap type, add its abstract subtypes plus any
// concrete types whose kind sits below it.
match ty {
Any => {
add_abstract(&mut choices, &[Eq, Struct, Array, I31, None]);
add_concrete(&mut choices, &self.array_types);
add_concrete(&mut choices, &self.struct_types);
}
Eq => {
add_abstract(&mut choices, &[Struct, Array, I31, None]);
add_concrete(&mut choices, &self.array_types);
add_concrete(&mut choices, &self.struct_types);
}
Struct => {
add_abstract(&mut choices, &[Struct, None]);
add_concrete(&mut choices, &self.struct_types);
}
Array => {
add_abstract(&mut choices, &[Array, None]);
add_concrete(&mut choices, &self.array_types);
}
I31 => {
add_abstract(&mut choices, &[None]);
}
Func => {
add_abstract(&mut choices, &[NoFunc]);
add_concrete(&mut choices, &self.func_types);
}
Extern => {
add_abstract(&mut choices, &[NoExtern]);
}
// Bottom types (and exn/cont) have no further subtypes to add.
Exn | NoExn | None | NoExtern | NoFunc | Cont | NoCont => {}
}
}
HT::Concrete(idx) => {
// Concrete subtypes are the declared subtypes of `idx` ...
if let Some(subs) = self.super_to_sub_types.get(&idx) {
choices.extend(subs.iter().copied().map(HT::Concrete));
}
// ... plus the matching bottom type for `idx`'s composite kind.
match self
.types
.get(usize::try_from(idx).unwrap())
.map(|ty| (ty.composite_type.shared, &ty.composite_type.inner))
{
Some((shared, CT::Array(_) | CT::Struct(_))) => choices.push(HT::Abstract {
shared,
ty: AbstractHeapType::None,
}),
Some((shared, CT::Func(_))) => choices.push(HT::Abstract {
shared,
ty: AbstractHeapType::NoFunc,
}),
None => {
// Index not (yet) in `types`: nothing extra to add.
}
}
}
}
Ok(*u.choose(&choices)?)
}
/// Generates a function type matching `ty`: parameters may widen to
/// supertypes while results may narrow to subtypes (standard function
/// subtyping variance).
fn arbitrary_matching_func_type(
    &mut self,
    u: &mut Unstructured,
    ty: &FuncType,
) -> Result<Rc<FuncType>> {
    let mut params = Vec::with_capacity(ty.params.len());
    for &p in &ty.params {
        params.push(self.arbitrary_super_type_of_val_type(u, p)?);
    }
    let mut results = Vec::with_capacity(ty.results.len());
    for &r in &ty.results {
        results.push(self.arbitrary_matching_val_type(u, r)?);
    }
    Ok(Rc::new(FuncType { params, results }))
}
/// Generates a value type that is a supertype of `ty`. Only reference
/// types have non-trivial supertypes; everything else maps to itself.
fn arbitrary_super_type_of_val_type(
    &mut self,
    u: &mut Unstructured,
    ty: ValType,
) -> Result<ValType> {
    Ok(match ty {
        ValType::Ref(r) => ValType::Ref(self.arbitrary_super_type_of_ref_type(u, r)?),
        other => other,
    })
}
/// Generates a ref type that is a supertype of `ty`. The result is always
/// nullable, since a nullable ref is a supertype of its non-nullable
/// counterpart.
fn arbitrary_super_type_of_ref_type(
    &self,
    u: &mut Unstructured,
    ty: RefType,
) -> Result<RefType> {
    let heap_type = self.arbitrary_super_type_of_heap_type(u, ty.heap_type)?;
    Ok(RefType {
        nullable: true,
        heap_type,
    })
}
/// Generates an arbitrary heap type that is a supertype of `ty`.
fn arbitrary_super_type_of_heap_type(
&self,
u: &mut Unstructured,
ty: HeapType,
) -> Result<HeapType> {
use {AbstractHeapType as AHT, CompositeInnerType as CT, HeapType as HT};
if !self.config.gc_enabled {
return Ok(ty);
}
// `ty` itself is always a valid choice; everything below only adds more.
let mut choices = vec![ty];
match ty {
HT::Abstract { shared, ty } => {
use AbstractHeapType::*;
// Helpers that add candidates with matching shared-ness.
let add_abstract = |choices: &mut Vec<HT>, tys: &[AHT]| {
choices.extend(tys.iter().map(|&ty| HT::Abstract { shared, ty }));
};
let add_concrete = |choices: &mut Vec<HT>, tys: &[u32]| {
choices.extend(
tys.iter()
.filter(|&&idx| shared == self.is_shared_type(idx))
.copied()
.map(HT::Concrete),
);
};
// For each abstract heap type, add its abstract supertypes plus,
// for bottom types, the concrete types sitting above them.
match ty {
None => {
add_abstract(&mut choices, &[Any, Eq, Struct, Array, I31]);
add_concrete(&mut choices, &self.array_types);
add_concrete(&mut choices, &self.struct_types);
}
NoExtern => {
add_abstract(&mut choices, &[Extern]);
}
NoFunc => {
add_abstract(&mut choices, &[Func]);
add_concrete(&mut choices, &self.func_types);
}
NoExn => {
add_abstract(&mut choices, &[Exn]);
}
Struct | Array | I31 => {
add_abstract(&mut choices, &[Any, Eq]);
}
Eq => {
add_abstract(&mut choices, &[Any]);
}
NoCont => {
add_abstract(&mut choices, &[Cont]);
}
// Top types have no supertypes to add.
Exn | Any | Func | Extern | Cont => {}
}
}
HT::Concrete(mut idx) => {
// Add the abstract supertypes corresponding to the concrete
// type's composite kind.
if let Some(sub_ty) = &self.types.get(usize::try_from(idx).unwrap()) {
use AbstractHeapType::*;
let ht = |ty| HT::Abstract {
shared: sub_ty.composite_type.shared,
ty,
};
match &sub_ty.composite_type.inner {
CT::Array(_) => {
choices.extend([ht(Any), ht(Eq), ht(Array)]);
}
CT::Func(_) => {
choices.push(ht(Func));
}
CT::Struct(_) => {
choices.extend([ht(Any), ht(Eq), ht(Struct)]);
}
}
} else {
// Index not (yet) in `types`: nothing abstract to add.
}
// Also walk the declared supertype chain of `idx`.
while let Some(supertype) = self
.types
.get(usize::try_from(idx).unwrap())
.and_then(|ty| ty.supertype)
{
choices.push(HT::Concrete(supertype));
idx = supertype;
}
}
}
Ok(*u.choose(&choices)?)
}
/// Generates an arbitrary composite type (array, func, or struct).
fn arbitrary_composite_type(&mut self, u: &mut Unstructured) -> Result<CompositeType> {
use CompositeInnerType as CT;
let shared = self.arbitrary_shared(u)?;
// Without GC only function types exist.
if !self.config.gc_enabled {
return Ok(CompositeType {
shared,
inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
});
}
// With GC, pick uniformly among the three composite kinds.
match u.int_in_range(0..=2)? {
0 => Ok(CompositeType {
shared,
inner: CT::Array(ArrayType(
self.propagate_shared(shared, |m| m.arbitrary_field_type(u))?,
)),
}),
1 => Ok(CompositeType {
shared,
inner: CT::Func(self.propagate_shared(shared, |m| m.arbitrary_func_type(u))?),
}),
2 => Ok(CompositeType {
shared,
inner: CT::Struct(self.propagate_shared(shared, |m| m.arbitrary_struct_type(u))?),
}),
_ => unreachable!(),
}
}
/// Generates an arbitrary struct type with up to 20 fields.
fn arbitrary_struct_type(&mut self, u: &mut Unstructured) -> Result<StructType> {
    let len = u.int_in_range(0..=20)?;
    let mut fields = Vec::with_capacity(len);
    while fields.len() < len {
        fields.push(self.arbitrary_field_type(u)?);
    }
    Ok(StructType {
        fields: fields.into(),
    })
}
/// Generates an arbitrary (possibly mutable) field type.
fn arbitrary_field_type(&mut self, u: &mut Unstructured) -> Result<FieldType> {
    let element_type = self.arbitrary_storage_type(u)?;
    let mutable = u.arbitrary()?;
    Ok(FieldType {
        element_type,
        mutable,
    })
}
/// Generates an arbitrary storage type: `i8`, `i16`, or a full value type.
fn arbitrary_storage_type(&mut self, u: &mut Unstructured) -> Result<StorageType> {
    Ok(match u.int_in_range(0..=2)? {
        0 => StorageType::I8,
        1 => StorageType::I16,
        _ => StorageType::Val(self.arbitrary_valtype(u)?),
    })
}
/// Generates an arbitrary (nullable) reference type.
fn arbitrary_ref_type(&self, u: &mut Unstructured) -> Result<RefType> {
    // Without the reference-types proposal, `funcref` is the only ref type.
    if !self.config.reference_types_enabled {
        return Ok(RefType::FUNCREF);
    }
    let heap_type = self.arbitrary_heap_type(u)?;
    Ok(RefType {
        nullable: true,
        heap_type,
    })
}
/// Generates an arbitrary heap type, respecting `max_type_limit` and the
/// current `must_share` requirement.
fn arbitrary_heap_type(&self, u: &mut Unstructured) -> Result<HeapType> {
assert!(self.config.reference_types_enabled);
let concrete_type_limit = match self.max_type_limit {
MaxTypeLimit::Num(n) => n,
MaxTypeLimit::ModuleTypes => u32::try_from(self.types.len()).unwrap(),
};
// With GC, sometimes reference a concrete type index.
if self.config.gc_enabled && concrete_type_limit > 0 && u.arbitrary()? {
let idx = u.int_in_range(0..=concrete_type_limit - 1)?;
// The chosen index may exceed `types.len()` when it refers to a
// not-yet-generated member of the current rec group; only use it if
// the type already exists and satisfies `must_share`.
if let Some(ty) = self.types.get(idx as usize) {
if !(self.must_share && !ty.composite_type.shared) {
return Ok(HeapType::Concrete(idx));
}
}
}
// Otherwise fall back to an abstract heap type allowed by the enabled
// proposals.
use AbstractHeapType::*;
let mut choices = vec![Func, Extern];
if self.config.exceptions_enabled {
choices.push(Exn);
}
if self.config.gc_enabled {
choices.extend(
[Any, None, NoExtern, NoFunc, Eq, Struct, Array, I31]
.iter()
.copied(),
);
}
Ok(HeapType::Abstract {
shared: self.arbitrary_shared(u)?,
ty: *u.choose(&choices)?,
})
}
/// Generates an arbitrary function signature with up to 20 parameters and,
/// when multi-value is enabled, up to 20 results (otherwise at most one).
fn arbitrary_func_type(&mut self, u: &mut Unstructured) -> Result<Rc<FuncType>> {
    const MAX_PARAMS: usize = 20;
    let mut params = Vec::new();
    arbitrary_loop(u, 0, MAX_PARAMS, |u| {
        params.push(self.arbitrary_valtype(u)?);
        Ok(true)
    })?;
    let max_results = if self.config.multi_value_enabled {
        MAX_PARAMS
    } else {
        1
    };
    let mut results = Vec::new();
    arbitrary_loop(u, 0, max_results, |u| {
        results.push(self.arbitrary_valtype(u)?);
        Ok(true)
    })?;
    Ok(Rc::new(FuncType { params, results }))
}
/// Whether another tag may be added: requires the exceptions proposal, at
/// least one result-less function type, and room under `max_tags`.
fn can_add_local_or_import_tag(&self) -> bool {
self.config.exceptions_enabled
&& self.has_tag_func_types()
&& self.tags.len() < self.config.max_tags
}
/// Whether another function may be added (needs a func type and room).
fn can_add_local_or_import_func(&self) -> bool {
!self.func_types.is_empty() && self.funcs.len() < self.config.max_funcs
}
/// Whether another table may be added.
fn can_add_local_or_import_table(&self) -> bool {
self.tables.len() < self.config.max_tables
}
/// Whether another global may be added.
fn can_add_local_or_import_global(&self) -> bool {
self.globals.len() < self.config.max_globals
}
/// Whether another memory may be added.
fn can_add_local_or_import_memory(&self) -> bool {
self.memories.len() < self.config.max_memories
}
/// Generates arbitrary imports within the configured min/max counts and
/// the overall `max_type_size` budget.
fn arbitrary_imports(&mut self, u: &mut Unstructured) -> Result<()> {
if self.config.max_type_size < self.type_size {
return Ok(());
}
let mut import_strings = HashSet::new();
// Re-built each iteration: the entity kinds we may still import.
let mut choices: Vec<fn(&mut Unstructured, &mut Module) -> Result<EntityType>> =
Vec::with_capacity(5);
let min = self.config.min_imports.saturating_sub(self.num_imports);
let max = self.config.max_imports.saturating_sub(self.num_imports);
arbitrary_loop(u, min, max, |u| {
choices.clear();
if self.can_add_local_or_import_tag() {
choices.push(|u, m| {
let ty = m.arbitrary_tag_type(u)?;
Ok(EntityType::Tag(ty))
});
}
if self.can_add_local_or_import_func() {
choices.push(|u, m| {
let idx = *u.choose(&m.func_types)?;
let ty = m.func_type(idx).clone();
Ok(EntityType::Func(idx, ty))
});
}
if self.can_add_local_or_import_global() {
choices.push(|u, m| {
let ty = m.arbitrary_global_type(u)?;
Ok(EntityType::Global(ty))
});
}
if self.can_add_local_or_import_memory() {
choices.push(|u, m| {
let ty = arbitrary_memtype(u, m.config())?;
Ok(EntityType::Memory(ty))
});
}
if self.can_add_local_or_import_table() {
choices.push(|u, m| {
let ty = arbitrary_table_type(u, m.config(), Some(m))?;
Ok(EntityType::Table(ty))
});
}
if choices.is_empty() {
return Ok(false);
}
let f = u.choose(&choices)?;
let entity_type = f(u, self)?;
// Stop importing once this entity would blow the type-size budget.
let budget = self.config.max_type_size - self.type_size;
if entity_type.size() + 1 > budget {
return Ok(false);
}
self.type_size += entity_type.size() + 1;
let mut import_pair = unique_import_strings(1_000, u)?;
if self.duplicate_imports_behavior == DuplicateImportsBehavior::Disallowed {
// Disambiguate duplicates by appending a counter to the field name.
while import_strings.contains(&import_pair) {
use std::fmt::Write;
write!(&mut import_pair.1, "{}", import_strings.len()).unwrap();
}
import_strings.insert(import_pair.clone());
}
let (module, field) = import_pair;
// Imported entities occupy the front of their index spaces.
match &entity_type {
EntityType::Tag(ty) => self.tags.push(ty.clone()),
EntityType::Func(idx, ty) => self.funcs.push((*idx, ty.clone())),
EntityType::Global(ty) => self.globals.push(*ty),
EntityType::Table(ty) => self.tables.push(*ty),
EntityType::Memory(ty) => self.memories.push(*ty),
}
self.num_imports += 1;
self.imports.push(Import {
module,
field,
entity_type,
});
Ok(true)
})?;
Ok(())
}
/// If `config.available_imports` is set, derives this module's imports
/// from that example module instead of inventing them; returns whether
/// that happened.
fn arbitrary_imports_from_available(&mut self, u: &mut Unstructured) -> Result<bool> {
// `take` ensures the example module is consumed at most once.
let example_module = if let Some(wasm) = self.config.available_imports.take() {
wasm
} else {
return Ok(false);
};
#[cfg(feature = "wasmparser")]
{
self._arbitrary_imports_from_available(u, &example_module)?;
Ok(true)
}
// Without `wasmparser` we cannot parse the example module at all.
#[cfg(not(feature = "wasmparser"))]
{
let _ = (example_module, u);
panic!("support for `available_imports` was disabled at compile time");
}
}
#[cfg(feature = "wasmparser")]
// Parses `example_module`, selects a random subset of its imports, and
// recreates those imports (and any function types they need) in `self`,
// subject to the configured count and type-size limits.
fn _arbitrary_imports_from_available(
&mut self,
u: &mut Unstructured,
example_module: &[u8],
) -> Result<()> {
let mut available_types = Vec::new();
let mut available_imports = Vec::<wasmparser::Import>::new();
// Scan the example module, recording its (non-GC) function types and a
// randomly chosen subset of its imports.
for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
match payload.expect("could not parse the available import payload") {
wasmparser::Payload::TypeSection(type_reader) => {
for ty in type_reader.into_iter_err_on_gc_types() {
let ty = ty.expect("could not parse type section");
available_types.push((ty, None));
}
}
wasmparser::Payload::ImportSection(import_reader) => {
for im in import_reader {
let im = im.expect("could not read import");
let use_import = u.arbitrary().unwrap_or(false);
if !use_import {
continue;
}
available_imports.push(im);
}
}
_ => {}
}
}
let max_types = self.config.max_types;
let multi_value_enabled = self.config.multi_value_enabled;
let mut new_imports = Vec::with_capacity(available_imports.len());
let first_type_index = self.types.len();
let mut new_types = Vec::<SubType>::new();
// Translates (and memoizes, via the `Option` slot in `available_types`)
// one of the example module's signatures into `new_types`. Returns
// `None` when `max_types` is hit or multi-value would be required but
// is disabled.
let mut make_func_type = |module: &Self, parsed_sig_idx: u32| {
let serialized_sig_idx = match available_types.get_mut(parsed_sig_idx as usize) {
None => panic!("signature index refers to a type out of bounds"),
Some((_, Some(idx))) => *idx as usize,
Some((func_type, index_store)) => {
let multi_value_required = func_type.results().len() > 1;
let new_index = first_type_index + new_types.len();
if new_index >= max_types || (multi_value_required && !multi_value_enabled) {
return None;
}
let func_type = Rc::new(FuncType {
params: func_type
.params()
.iter()
.map(|t| (*t).try_into().unwrap())
.collect(),
results: func_type
.results()
.iter()
.map(|t| (*t).try_into().unwrap())
.collect(),
});
index_store.replace(new_index as u32);
let shared = module.arbitrary_shared(u).ok()?;
new_types.push(SubType {
is_final: true,
supertype: None,
composite_type: CompositeType::new_func(Rc::clone(&func_type), shared),
});
new_index
}
};
match &new_types[serialized_sig_idx - first_type_index]
.composite_type
.inner
{
CompositeInnerType::Func(f) => Some((serialized_sig_idx as u32, Rc::clone(f))),
_ => unimplemented!(),
}
};
for import in available_imports {
let type_size_budget = self.config.max_type_size - self.type_size;
let entity_type = match &import.ty {
// NOTE(review): the func and tag arms below check the budget but
// never add `entity.size()` to `self.type_size`, unlike the
// table/memory/global arms — confirm whether that is intentional.
wasmparser::TypeRef::Func(sig_idx) => {
if self.funcs.len() >= self.config.max_funcs {
continue;
} else if let Some((sig_idx, func_type)) = make_func_type(&self, *sig_idx) {
let entity = EntityType::Func(sig_idx as u32, Rc::clone(&func_type));
if type_size_budget < entity.size() {
continue;
}
self.funcs.push((sig_idx, func_type));
entity
} else {
continue;
}
}
wasmparser::TypeRef::Tag(wasmparser::TagType { func_type_idx, .. }) => {
let can_add_tag = self.tags.len() < self.config.max_tags;
if !self.config.exceptions_enabled || !can_add_tag {
continue;
} else if let Some((sig_idx, func_type)) = make_func_type(&self, *func_type_idx)
{
let tag_type = TagType {
func_type_idx: sig_idx,
func_type,
};
let entity = EntityType::Tag(tag_type.clone());
if type_size_budget < entity.size() {
continue;
}
self.tags.push(tag_type);
entity
} else {
continue;
}
}
wasmparser::TypeRef::Table(table_ty) => {
let table_ty = TableType::try_from(*table_ty).unwrap();
let entity = EntityType::Table(table_ty);
let type_size = entity.size();
if type_size_budget < type_size || !self.can_add_local_or_import_table() {
continue;
}
self.type_size += type_size;
self.tables.push(table_ty);
entity
}
wasmparser::TypeRef::Memory(memory_ty) => {
let memory_ty = MemoryType::try_from(*memory_ty).unwrap();
let entity = EntityType::Memory(memory_ty);
let type_size = entity.size();
if type_size_budget < type_size || !self.can_add_local_or_import_memory() {
continue;
}
self.type_size += type_size;
self.memories.push(memory_ty);
entity
}
wasmparser::TypeRef::Global(global_ty) => {
let global_ty = (*global_ty).try_into().unwrap();
let entity = EntityType::Global(global_ty);
let type_size = entity.size();
if type_size_budget < type_size || !self.can_add_local_or_import_global() {
continue;
}
self.type_size += type_size;
self.globals.push(global_ty);
entity
}
};
new_imports.push(Import {
module: import.module.to_string(),
field: import.name.to_string(),
entity_type,
});
self.num_imports += 1;
}
// Each translated type becomes its own singleton rec group.
for ty in new_types {
self.rec_groups.push(self.types.len()..self.types.len() + 1);
self.add_type(ty);
}
self.imports.extend(new_imports);
Ok(())
}
/// Returns the `EntityType` of the entity at `index` within the index
/// space selected by `kind`. Panics if `index` is out of bounds for that
/// index space.
fn type_of(&self, kind: ExportKind, index: u32) -> EntityType {
    match kind {
        ExportKind::Global => EntityType::Global(self.globals[index as usize]),
        ExportKind::Memory => EntityType::Memory(self.memories[index as usize]),
        ExportKind::Table => EntityType::Table(self.tables[index as usize]),
        ExportKind::Func => {
            let (_idx, ty) = &self.funcs[index as usize];
            // `u32::MAX` (same value as the soft-deprecated
            // `u32::max_value()` used previously) is a placeholder type
            // index; presumably consumers of this `EntityType::Func` only
            // inspect the signature itself — TODO confirm.
            EntityType::Func(u32::MAX, ty.clone())
        }
        ExportKind::Tag => EntityType::Tag(self.tags[index as usize].clone()),
    }
}
fn ty(&self, idx: u32) -> &SubType {
&self.types[idx as usize]
}
fn func_types(&self) -> impl Iterator<Item = (u32, &FuncType)> + '_ {
self.func_types
.iter()
.copied()
.map(move |type_i| (type_i, &**self.func_type(type_i)))
}
fn func_type(&self, idx: u32) -> &Rc<FuncType> {
match &self.ty(idx).composite_type.inner {
CompositeInnerType::Func(f) => f,
_ => panic!("types[{idx}] is not a func type"),
}
}
fn tags(&self) -> impl Iterator<Item = (u32, &TagType)> + '_ {
self.tags
.iter()
.enumerate()
.map(move |(i, ty)| (i as u32, ty))
}
fn funcs(&self) -> impl Iterator<Item = (u32, &Rc<FuncType>)> + '_ {
self.funcs
.iter()
.enumerate()
.map(move |(i, (_, ty))| (i as u32, ty))
}
fn has_tag_func_types(&self) -> bool {
self.tag_func_types().next().is_some()
}
/// Iterates over the indices of function types valid for tags, i.e. those
/// with no results.
fn tag_func_types(&self) -> impl Iterator<Item = u32> + '_ {
    self.func_types
        .iter()
        .filter(move |&&i| self.func_type(i).results.is_empty())
        .copied()
}
/// Picks an arbitrary value type from the configured set.
///
/// Value types are first bucketed into classes (all reference types share
/// one bucket) so that each class is equally likely regardless of how many
/// concrete reference types are configured.
fn arbitrary_valtype(&self, u: &mut Unstructured) -> Result<ValType> {
    #[derive(PartialEq, Eq, PartialOrd, Ord)]
    enum ValTypeClass {
        I32,
        I64,
        F32,
        F64,
        V128,
        Ref,
    }
    let mut classes = Vec::with_capacity(self.valtypes.len());
    for vt in &self.valtypes {
        classes.push(match vt {
            ValType::I32 => ValTypeClass::I32,
            ValType::I64 => ValTypeClass::I64,
            ValType::F32 => ValTypeClass::F32,
            ValType::F64 => ValTypeClass::F64,
            ValType::V128 => ValTypeClass::V128,
            ValType::Ref(_) => ValTypeClass::Ref,
        });
    }
    // Deduplicate so each class gets uniform weight under `u.choose`.
    classes.sort_unstable();
    classes.dedup();
    Ok(match u.choose(&classes)? {
        ValTypeClass::I32 => ValType::I32,
        ValTypeClass::I64 => ValType::I64,
        ValTypeClass::F32 => ValType::F32,
        ValTypeClass::F64 => ValType::F64,
        ValTypeClass::V128 => ValType::V128,
        ValTypeClass::Ref => ValType::Ref(self.arbitrary_ref_type(u)?),
    })
}
/// Generates an arbitrary global type.
///
/// Reference types inherit sharedness from their heap type; numeric and
/// vector globals pick sharedness arbitrarily.
fn arbitrary_global_type(&self, u: &mut Unstructured) -> Result<GlobalType> {
    let val_type = self.arbitrary_valtype(u)?;
    let shared = if let ValType::Ref(r) = val_type {
        self.is_shared_ref_type(r)
    } else {
        self.arbitrary_shared(u)?
    };
    let mutable = u.arbitrary()?;
    Ok(GlobalType {
        val_type,
        mutable,
        shared,
    })
}
fn arbitrary_tag_type(&self, u: &mut Unstructured) -> Result<TagType> {
let candidate_func_types: Vec<_> = self.tag_func_types().collect();
arbitrary_tag_type(u, &candidate_func_types, |ty_idx| {
self.func_type(ty_idx).clone()
})
}
/// Generates the module's locally-defined tags.
///
/// No-op unless the exception-handling proposal is enabled and at least one
/// suitable (no-result) function type exists.
fn arbitrary_tags(&mut self, u: &mut Unstructured) -> Result<()> {
    if !self.config.exceptions_enabled || !self.has_tag_func_types() {
        return Ok(());
    }
    arbitrary_loop(u, self.config.min_tags, self.config.max_tags, |u| {
        if !self.can_add_local_or_import_tag() {
            return Ok(false);
        }
        let tag = self.arbitrary_tag_type(u)?;
        self.tags.push(tag);
        self.num_defined_tags += 1;
        Ok(true)
    })
}
fn arbitrary_funcs(&mut self, u: &mut Unstructured) -> Result<()> {
if self.func_types.is_empty() {
return Ok(());
}
let unshared_func_types: Vec<_> = self
.func_types
.iter()
.copied()
.filter(|&i| !self.is_shared_type(i))
.collect();
if unshared_func_types.is_empty() {
return Ok(());
}
arbitrary_loop(u, self.config.min_funcs, self.config.max_funcs, |u| {
if !self.can_add_local_or_import_func() {
return Ok(false);
}
let max = unshared_func_types.len() - 1;
let ty = unshared_func_types[u.int_in_range(0..=max)?];
self.funcs.push((ty, self.func_type(ty).clone()));
self.num_defined_funcs += 1;
Ok(true)
})
}
/// Generates the module's locally-defined tables along with their optional
/// initializer expressions.
fn arbitrary_tables(&mut self, u: &mut Unstructured) -> Result<()> {
    let min = self.config.min_tables as usize;
    let max = self.config.max_tables as usize;
    arbitrary_loop(u, min, max, |u| {
        if !self.can_add_local_or_import_table() {
            return Ok(false);
        }
        // Choose the table's type first, then an initializer compatible
        // with its element type.
        let table_ty = arbitrary_table_type(u, self.config(), Some(self))?;
        let initializer = self.arbitrary_table_init(u, table_ty.element_type)?;
        self.defined_tables.push(initializer);
        self.tables.push(table_ty);
        Ok(true)
    })
}
/// Picks an initializer for a defined table of element type `ty`.
///
/// Returns `None` for the default null-initialization; with GC enabled a
/// non-null constant expression may be produced instead (and is required
/// for non-nullable element types).
fn arbitrary_table_init(
    &mut self,
    u: &mut Unstructured,
    ty: RefType,
) -> Result<Option<ConstExpr>> {
    // Without GC, tables are always null-initialized (and must be nullable).
    if !self.config.gc_enabled {
        assert!(ty.nullable);
        return Ok(None);
    }
    // Nullable element types may still opt into the default initializer.
    if ty.nullable && u.arbitrary()? {
        return Ok(None);
    }
    self.arbitrary_const_expr(ValType::Ref(ty), u).map(Some)
}
/// Generates the module's locally-defined memories.
fn arbitrary_memories(&mut self, u: &mut Unstructured) -> Result<()> {
    let min = self.config.min_memories as usize;
    let max = self.config.max_memories as usize;
    arbitrary_loop(u, min, max, |u| {
        if !self.can_add_local_or_import_memory() {
            return Ok(false);
        }
        let mem = arbitrary_memtype(u, self.config())?;
        self.num_defined_memories += 1;
        self.memories.push(mem);
        Ok(true)
    })
}
/// Appends a defined global of the given type with an arbitrary constant
/// initializer, returning the new global's index.
fn add_arbitrary_global_of_type(
    &mut self,
    ty: GlobalType,
    u: &mut Unstructured,
) -> Result<u32> {
    let init = self.arbitrary_const_expr(ty.val_type, u)?;
    let index = self.globals.len() as u32;
    self.globals.push(ty);
    self.defined_globals.push((index, init));
    Ok(index)
}
/// Generates an arbitrary constant expression producing a value of type
/// `ty`, choosing among `global.get` of compatible globals, plain constants,
/// extended-const arithmetic (when enabled), `ref.null`, and `ref.func`.
fn arbitrary_const_expr(&mut self, ty: ValType, u: &mut Unstructured) -> Result<ConstExpr> {
    // Reuse the cached choices vector across calls to avoid reallocating;
    // it is put back before returning.
    let mut choices = mem::take(&mut self.const_expr_choices);
    choices.clear();
    // `global.get` of any immutable, type-compatible global is always a
    // valid constant expression.
    for i in self.globals_for_const_expr(ty) {
        choices.push(Box::new(move |_, _| Ok(ConstExpr::global_get(i))));
    }
    // Possibly narrow the requested type to a matching subtype first.
    let ty = self.arbitrary_matching_val_type(u, ty)?;
    match ty {
        ValType::I32 => {
            choices.push(Box::new(|u, _| Ok(ConstExpr::i32_const(u.arbitrary()?))));
            if self.config.extended_const_enabled {
                choices.push(Box::new(arbitrary_extended_const));
            }
        }
        ValType::I64 => {
            choices.push(Box::new(|u, _| Ok(ConstExpr::i64_const(u.arbitrary()?))));
            if self.config.extended_const_enabled {
                choices.push(Box::new(arbitrary_extended_const));
            }
        }
        ValType::F32 => choices.push(Box::new(|u, _| Ok(ConstExpr::f32_const(u.arbitrary()?)))),
        ValType::F64 => choices.push(Box::new(|u, _| Ok(ConstExpr::f64_const(u.arbitrary()?)))),
        ValType::V128 => {
            choices.push(Box::new(|u, _| Ok(ConstExpr::v128_const(u.arbitrary()?))))
        }
        ValType::Ref(ty) => {
            // `ref.null` is only valid for nullable reference types.
            if ty.nullable {
                choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_null(ty.heap_type))));
            }
            match ty.heap_type {
                HeapType::Abstract {
                    ty: AbstractHeapType::Func,
                    shared,
                } => {
                    // Any function whose sharedness matches can be
                    // referenced via `ref.func`.
                    let num_funcs = self
                        .funcs
                        .iter()
                        .filter(|(t, _)| shared == self.is_shared_type(*t))
                        .count();
                    if num_funcs > 0 {
                        let pick = u.int_in_range(0..=num_funcs - 1)?;
                        let (i, _) = self
                            .funcs
                            .iter()
                            .map(|(t, _)| *t)
                            .enumerate()
                            .filter(|(_, t)| shared == self.is_shared_type(*t))
                            .nth(pick)
                            .unwrap();
                        choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
                    }
                }
                HeapType::Concrete(ty) => {
                    // Only functions of exactly this concrete type qualify.
                    for (i, fty) in self.funcs.iter().map(|(t, _)| *t).enumerate() {
                        if ty != fty {
                            continue;
                        }
                        choices.push(Box::new(move |_, _| Ok(ConstExpr::ref_func(i as u32))));
                    }
                }
                _ => {}
            }
        }
    }
    let f = u.choose(&choices)?;
    let ret = f(u, ty);
    // Return the (cleared-on-entry) scratch vector for the next call.
    self.const_expr_choices = choices;
    return ret;
    /// Builds an extended-const expression: a random arithmetic tree over
    /// i32/i64 constants, emitted in reverse so the stack discipline works
    /// out (operators are generated before their operands).
    fn arbitrary_extended_const(u: &mut Unstructured<'_>, ty: ValType) -> Result<ConstExpr> {
        use wasm_encoder::Instruction::*;
        assert!(ty == ValType::I32 || ty == ValType::I64);
        let add = if ty == ValType::I32 { I32Add } else { I64Add };
        let sub = if ty == ValType::I32 { I32Sub } else { I64Sub };
        let mul = if ty == ValType::I32 { I32Mul } else { I64Mul };
        let const_: fn(&mut Unstructured<'_>) -> Result<wasm_encoder::Instruction<'static>> =
            if ty == ValType::I32 {
                |u| u.arbitrary().map(I32Const)
            } else {
                |u| u.arbitrary().map(I64Const)
            };
        let mut instrs = Vec::new();
        // `needed` counts operands still owed; each operator adds one more
        // (consumes two, produces one), each constant settles one.
        let mut needed = 1;
        while needed > 0 {
            // Force constants once input runs dry or the expression is
            // getting long, guaranteeing termination.
            let choice = if u.is_empty() || instrs.len() > 10 {
                0
            } else {
                u.int_in_range(0..=3)?
            };
            match choice {
                0 => {
                    instrs.push(const_(u)?);
                    needed -= 1;
                }
                1 => {
                    instrs.push(add.clone());
                    needed += 1;
                }
                2 => {
                    instrs.push(sub.clone());
                    needed += 1;
                }
                3 => {
                    instrs.push(mul.clone());
                    needed += 1;
                }
                _ => unreachable!(),
            }
        }
        // Instructions were generated operator-first; reverse into proper
        // postfix evaluation order.
        Ok(ConstExpr::extended(instrs.into_iter().rev()))
    }
}
/// Generates the module's locally-defined globals.
fn arbitrary_globals(&mut self, u: &mut Unstructured) -> Result<()> {
    arbitrary_loop(u, self.config.min_globals, self.config.max_globals, |u| {
        if !self.can_add_local_or_import_global() {
            return Ok(false);
        }
        let ty = self.arbitrary_global_type(u)?;
        self.add_arbitrary_global_of_type(ty, u).map(|_| true)
    })
}
/// If the config carries an `exports` example module, mirrors its exports
/// into this module and returns `true`; otherwise returns `false`.
///
/// Panics when the `wasmparser` feature was compiled out, since the example
/// module cannot be parsed without it.
fn required_exports(&mut self, u: &mut Unstructured) -> Result<bool> {
    let Some(example_module) = self.config.exports.clone() else {
        return Ok(false);
    };
    #[cfg(feature = "wasmparser")]
    {
        self._required_exports(u, &example_module)?;
        Ok(true)
    }
    #[cfg(not(feature = "wasmparser"))]
    {
        let _ = (example_module, u);
        panic!("support for `exports` was disabled at compile time");
    }
}
#[cfg(feature = "wasmparser")]
/// Parses the `exports` example module, then defines a matching item (and
/// export) in this module for every export the example declares.
///
/// Panics (rather than erroring) on malformed/unsupported example modules,
/// since `exports` is user-supplied configuration, not fuzz input.
fn _required_exports(&mut self, u: &mut Unstructured, example_module: &[u8]) -> Result<()> {
    let mut required_exports: Vec<wasmparser::Export> = vec![];
    // Validation doubles as the source of type information for each export.
    let mut validator = wasmparser::Validator::new();
    let exports_types = validator
        .validate_all(&example_module)
        .expect("Failed to validate `exports` Wasm");
    for payload in wasmparser::Parser::new(0).parse_all(&example_module) {
        match payload.expect("Failed to read `exports` Wasm") {
            wasmparser::Payload::ExportSection(export_reader) => {
                required_exports = export_reader
                    .into_iter()
                    .collect::<Result<_, _>>()
                    .expect("Failed to read `exports` export section");
            }
            _ => {}
        }
    }
    let exports_types = exports_types.as_ref();
    for export in required_exports {
        // `new_index` is the index (in this module) of the freshly created
        // item backing this export.
        let new_index = match exports_types
            .entity_type_from_export(&export)
            .unwrap_or_else(|| {
                panic!(
                    "Unable to get type from export {:?} in `exports` Wasm",
                    export,
                )
            }) {
            wasmparser::types::EntityType::Func(id) => {
                let subtype = exports_types.get(id).unwrap_or_else(|| {
                    panic!(
                        "Unable to get subtype for function {:?} in `exports` Wasm",
                        id
                    )
                });
                match &subtype.composite_type.inner {
                    wasmparser::CompositeInnerType::Func(func_type) => {
                        // Only plain (final, supertype-less) function types
                        // are supported for mirrored exports.
                        assert!(
                            subtype.is_final,
                            "Subtype {:?} from `exports` Wasm is not final",
                            subtype
                        );
                        assert!(
                            subtype.supertype_idx.is_none(),
                            "Subtype {:?} from `exports` Wasm has non-empty supertype",
                            subtype
                        );
                        // Convert the wasmparser type into this crate's
                        // representation.
                        let new_type = Rc::new(FuncType {
                            params: func_type
                                .params()
                                .iter()
                                .copied()
                                .map(|t| t.try_into().unwrap())
                                .collect(),
                            results: func_type
                                .results()
                                .iter()
                                .copied()
                                .map(|t| t.try_into().unwrap())
                                .collect(),
                        });
                        // Each mirrored type becomes its own singleton rec
                        // group, then a new function using it is defined.
                        self.rec_groups.push(self.types.len()..self.types.len() + 1);
                        let type_index = self.add_type(SubType {
                            is_final: true,
                            supertype: None,
                            composite_type: CompositeType::new_func(
                                Rc::clone(&new_type),
                                subtype.composite_type.shared,
                            ),
                        });
                        let func_index = self.funcs.len() as u32;
                        self.funcs.push((type_index, new_type));
                        self.num_defined_funcs += 1;
                        func_index
                    }
                    _ => panic!(
                        "Unable to handle type {:?} from `exports` Wasm",
                        subtype.composite_type
                    ),
                }
            }
            wasmparser::types::EntityType::Global(global_type) => {
                self.add_arbitrary_global_of_type(global_type.try_into().unwrap(), u)?
            }
            // Tables, memories, and tags are not yet mirrorable.
            wasmparser::types::EntityType::Table(_)
            | wasmparser::types::EntityType::Memory(_)
            | wasmparser::types::EntityType::Tag(_) => {
                panic!(
                    "Config `exports` has an export of type {:?} which cannot yet be handled.",
                    export.kind
                )
            }
        };
        self.exports
            .push((export.name.to_string(), export.kind.into(), new_index));
        self.export_names.insert(export.name.to_string());
    }
    Ok(())
}
/// Generates the module's exports, subject to the `max_type_size` budget
/// (or exporting everything when `export_everything` is set).
fn arbitrary_exports(&mut self, u: &mut Unstructured) -> Result<()> {
    // Out of type-size budget: emit nothing unless everything must be
    // exported regardless.
    if self.config.max_type_size < self.type_size && !self.config.export_everything {
        return Ok(());
    }
    // One candidate list per exportable index space.
    let mut choices: Vec<Vec<(ExportKind, u32)>> = Vec::with_capacity(6);
    choices.push(
        (0..self.funcs.len())
            .map(|i| (ExportKind::Func, i as u32))
            .collect(),
    );
    choices.push(
        (0..self.tables.len())
            .map(|i| (ExportKind::Table, i as u32))
            .collect(),
    );
    choices.push(
        (0..self.memories.len())
            .map(|i| (ExportKind::Memory, i as u32))
            .collect(),
    );
    choices.push(
        (0..self.globals.len())
            .map(|i| (ExportKind::Global, i as u32))
            .collect(),
    );
    if self.config.export_everything {
        // Unconditionally export every item with a fresh unique name.
        for choices_by_kind in choices {
            for (kind, idx) in choices_by_kind {
                let name = unique_string(1_000, &mut self.export_names, u)?;
                self.add_arbitrary_export(name, kind, idx)?;
            }
        }
        return Ok(());
    }
    arbitrary_loop(u, self.config.min_exports, self.config.max_exports, |u| {
        // Re-filter candidates each iteration: earlier exports consumed
        // budget, so items may no longer fit.
        let max_size = self.config.max_type_size - self.type_size;
        for list in choices.iter_mut() {
            list.retain(|(kind, idx)| self.type_of(*kind, *idx).size() + 1 < max_size);
        }
        choices.retain(|list| !list.is_empty());
        if choices.is_empty() {
            return Ok(false);
        }
        // Pick a kind uniformly, then an item of that kind uniformly.
        let name = unique_string(1_000, &mut self.export_names, u)?;
        let list = u.choose(&choices)?;
        let (kind, idx) = *u.choose(list)?;
        self.add_arbitrary_export(name, kind, idx)?;
        Ok(true)
    })
}
fn add_arbitrary_export(&mut self, name: String, kind: ExportKind, idx: u32) -> Result<()> {
let ty = self.type_of(kind, idx);
self.type_size += 1 + ty.size();
if self.type_size <= self.config.max_type_size {
self.exports.push((name, kind, idx));
Ok(())
} else {
Err(arbitrary::Error::IncorrectFormat)
}
}
/// Possibly designates a start function.
///
/// Only functions with no parameters and no results are valid start
/// functions; one is chosen arbitrarily (and only sometimes) when the
/// config allows a start export.
fn arbitrary_start(&mut self, u: &mut Unstructured) -> Result<()> {
    if !self.config.allow_start_export {
        return Ok(());
    }
    // Collect all `[] -> []` functions — the only signatures a start
    // function may have. (The original manual loop also carried a redundant
    // `as usize` cast on an already-`usize` length.)
    let choices: Vec<u32> = self
        .funcs()
        .filter(|(_, ty)| ty.params.is_empty() && ty.results.is_empty())
        .map(|(func_idx, _)| func_idx)
        .collect();
    // Flip a coin (treating exhausted input as "no") before committing.
    if !choices.is_empty() && u.arbitrary().unwrap_or(false) {
        let f = *u.choose(&choices)?;
        self.start = Some(f);
    }
    Ok(())
}
/// Generates the module's element segments.
fn arbitrary_elems(&mut self, u: &mut Unstructured) -> Result<()> {
    // Globals usable as active-segment offsets. Skipped when traps are
    // disallowed because a global's runtime value could be out of bounds.
    let mut global_i32 = vec![];
    let mut global_i64 = vec![];
    if !self.config.disallow_traps {
        for i in self.globals_for_const_expr(ValType::I32) {
            global_i32.push(i);
        }
        for i in self.globals_for_const_expr(ValType::I64) {
            global_i64.push(i);
        }
    }
    let disallow_traps = self.config.disallow_traps;
    // Produces (kind, max-size-hint) for an active segment targeting
    // `table` (`None` means table 0 via the MVP encoding). The size hint,
    // when present, caps the element count so the segment stays in bounds.
    let arbitrary_active_elem =
        |u: &mut Unstructured, min_mem_size: u64, table: Option<u32>, table_ty: &TableType| {
            let global_choices = if table_ty.table64 {
                &global_i64
            } else {
                &global_i32
            };
            let (offset, max_size_hint) = if !global_choices.is_empty() && u.arbitrary()? {
                // Global-valued offset: bounds unknown, so no size hint.
                let g = u.choose(&global_choices)?;
                (Offset::Global(*g), None)
            } else {
                let max_mem_size = if disallow_traps {
                    table_ty.minimum
                } else if table_ty.table64 {
                    u64::MAX
                } else {
                    u64::from(u32::MAX)
                };
                let offset = arbitrary_offset(u, min_mem_size, max_mem_size, 0)?;
                // Usually keep in-bounds offsets paired with an in-bounds
                // size so active segments actually initialize something.
                let max_size_hint = if disallow_traps
                    || (offset <= min_mem_size
                        && u.int_in_range(0..=CHANCE_OFFSET_INBOUNDS)? != 0)
                {
                    Some(min_mem_size - offset)
                } else {
                    None
                };
                let offset = if table_ty.table64 {
                    Offset::Const64(offset as i64)
                } else {
                    Offset::Const32(offset as i32)
                };
                (offset, max_size_hint)
            };
            Ok((ElementKind::Active { table, offset }, max_size_hint))
        };
    type GenElemSegment<'a> =
        dyn Fn(&mut Unstructured) -> Result<(ElementKind, Option<u64>)> + 'a;
    let mut choices: Vec<Box<GenElemSegment>> = Vec::new();
    // Passive and declared segments require the bulk-memory proposal.
    if self.config.bulk_memory_enabled {
        choices.push(Box::new(|_| Ok((ElementKind::Passive, None))));
        choices.push(Box::new(|_| Ok((ElementKind::Declared, None))));
    }
    for (i, ty) in self.tables.iter().enumerate() {
        // Mostly avoid targeting empty tables.
        if ty.minimum == 0 && u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? != 0 {
            continue;
        }
        let minimum = ty.minimum;
        let ty = *ty;
        // MVP-style active segment: implicit table 0, funcref only.
        if i == 0 && ty.element_type == RefType::FUNCREF {
            choices.push(Box::new(move |u| {
                arbitrary_active_elem(u, minimum, None, &ty)
            }));
        }
        // Explicit table index requires bulk memory.
        if self.config.bulk_memory_enabled {
            let idx = Some(i as u32);
            choices.push(Box::new(move |u| {
                arbitrary_active_elem(u, minimum, idx, &ty)
            }));
        }
    }
    if choices.is_empty() {
        return Ok(());
    }
    arbitrary_loop(
        u,
        self.config.min_element_segments,
        self.config.max_element_segments,
        |u| {
            let (kind, max_size_hint) = u.choose(&choices)?(u)?;
            let max = max_size_hint
                .map(|i| usize::try_from(i).unwrap())
                .unwrap_or_else(|| self.config.max_elements);
            // Active segments must match the target table's element type;
            // passive/declared segments may use any ref type.
            let ty = match kind {
                ElementKind::Passive | ElementKind::Declared => self.arbitrary_ref_type(u)?,
                ElementKind::Active { table, .. } => {
                    let idx = table.unwrap_or(0);
                    self.arbitrary_matching_ref_type(u, self.tables[idx as usize].element_type)?
                }
            };
            // The compact function-index encoding only works for funcref.
            let can_use_function_list = ty == RefType::FUNCREF;
            if !self.config.reference_types_enabled {
                assert!(can_use_function_list);
            }
            // Functions whose type is compatible with the segment type.
            let mut func_candidates = Vec::new();
            if can_use_function_list {
                match ty.heap_type {
                    HeapType::Abstract {
                        ty: AbstractHeapType::Func,
                        ..
                    } => {
                        func_candidates.extend(0..self.funcs.len() as u32);
                    }
                    HeapType::Concrete(ty) => {
                        for (i, (fty, _)) in self.funcs.iter().enumerate() {
                            if *fty == ty {
                                func_candidates.push(i as u32);
                            }
                        }
                    }
                    _ => {}
                }
            }
            // Pick between index-list and expression encodings.
            let items = if !self.config.reference_types_enabled
                || (can_use_function_list && u.arbitrary()?)
            {
                let mut init = vec![];
                if func_candidates.len() > 0 {
                    arbitrary_loop(u, self.config.min_elements, max, |u| {
                        let func_idx = *u.choose(&func_candidates)?;
                        init.push(func_idx);
                        Ok(true)
                    })?;
                }
                Elements::Functions(init)
            } else {
                let mut init = vec![];
                arbitrary_loop(u, self.config.min_elements, max, |u| {
                    init.push(self.arbitrary_const_expr(ValType::Ref(ty), u)?);
                    Ok(true)
                })?;
                Elements::Expressions(init)
            };
            self.elems.push(ElementSegment { kind, ty, items });
            Ok(true)
        },
    )
}
/// Generates a body for every locally-defined function.
fn arbitrary_code(&mut self, u: &mut Unstructured) -> Result<()> {
    self.compute_interesting_values();
    self.code.reserve(self.num_defined_funcs);
    let mut allocs = CodeBuilderAllocations::new(self, self.config.exports.is_some());
    // Defined functions occupy the tail of the function index space, after
    // all imports.
    let first_defined = self.funcs.len() - self.num_defined_funcs;
    for (idx, ty) in self.funcs[first_defined..].iter() {
        let shared = self.is_shared_type(*idx);
        let body = self.arbitrary_func_body(u, ty, &mut allocs, shared)?;
        self.code.push(body);
    }
    allocs.finish(u, self)?;
    Ok(())
}
/// Generates one function body: arbitrary locals plus either structured
/// (valid) instructions or, when allowed, raw arbitrary bytes.
fn arbitrary_func_body(
    &self,
    u: &mut Unstructured,
    ty: &FuncType,
    allocs: &mut CodeBuilderAllocations,
    shared: bool,
) -> Result<Code> {
    let mut locals = self.arbitrary_locals(u)?;
    let builder = allocs.builder(ty, &mut locals, shared);
    let instructions;
    if self.config.allow_invalid_funcs && u.arbitrary().unwrap_or(false) {
        instructions = Instructions::Arbitrary(arbitrary_vec_u8(u)?);
    } else {
        instructions = Instructions::Generated(builder.arbitrary(u, self)?);
    }
    Ok(Code {
        locals,
        instructions,
    })
}
/// Generates up to 100 arbitrary local variable types for a function body.
fn arbitrary_locals(&self, u: &mut Unstructured) -> Result<Vec<ValType>> {
    let mut locals = Vec::new();
    arbitrary_loop(u, 0, 100, |u| {
        let ty = self.arbitrary_valtype(u)?;
        locals.push(ty);
        Ok(true)
    })?;
    Ok(locals)
}
/// Generates the module's data segments.
fn arbitrary_data(&mut self, u: &mut Unstructured) -> Result<()> {
    // Without memories, only passive segments make sense, and those need
    // the bulk-memory proposal.
    let memories = self.memories.len() as u32;
    if memories == 0 && !self.config.bulk_memory_enabled {
        return Ok(());
    }
    let disallow_traps = self.config.disallow_traps;
    // Offset generators for 32-bit memories: (u, min_pages, data_len) ->
    // Offset.
    let mut choices32: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
        vec![];
    choices32.push(Box::new(|u, min_size, data_len| {
        // `min` is the memory's minimum size in bytes (pages * 64 KiB),
        // saturated into the 32-bit range.
        let min = u32::try_from(min_size.saturating_mul(64 * 1024))
            .unwrap_or(u32::MAX)
            .into();
        let max = if disallow_traps { min } else { u32::MAX.into() };
        Ok(Offset::Const32(
            arbitrary_offset(u, min, max, data_len)? as i32
        ))
    }));
    // Offset generators for 64-bit memories.
    let mut choices64: Vec<Box<dyn Fn(&mut Unstructured, u64, usize) -> Result<Offset>>> =
        vec![];
    choices64.push(Box::new(|u, min_size, data_len| {
        let min = min_size.saturating_mul(64 * 1024);
        let max = if disallow_traps { min } else { u64::MAX };
        Ok(Offset::Const64(
            arbitrary_offset(u, min, max, data_len)? as i64
        ))
    }));
    // Global-valued offsets can't be bounds-checked statically, so they're
    // only available when traps are permitted.
    if !self.config.disallow_traps {
        for i in self.globals_for_const_expr(ValType::I32) {
            choices32.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
        }
        for i in self.globals_for_const_expr(ValType::I64) {
            choices64.push(Box::new(move |_, _, _| Ok(Offset::Global(i))));
        }
    }
    // Candidate target memories; empty memories are only occasionally
    // targeted.
    let mut memories = Vec::new();
    for (i, mem) in self.memories.iter().enumerate() {
        if mem.minimum > 0 || u.int_in_range(0..=CHANCE_SEGMENT_ON_EMPTY)? == 0 {
            memories.push(i as u32);
        }
    }
    if memories.is_empty() && !self.config.bulk_memory_enabled {
        return Ok(());
    }
    arbitrary_loop(
        u,
        self.config.min_data_segments,
        self.config.max_data_segments,
        |u| {
            let mut init: Vec<u8> = u.arbitrary()?;
            let kind =
                if self.config.bulk_memory_enabled && (memories.is_empty() || u.arbitrary()?) {
                    DataSegmentKind::Passive
                } else {
                    let memory_index = *u.choose(&memories)?;
                    let mem = &self.memories[memory_index as usize];
                    let f = if mem.memory64 {
                        u.choose(&choices64)?
                    } else {
                        u.choose(&choices32)?
                    };
                    let mut offset = f(u, mem.minimum, init.len())?;
                    if self.config.disallow_traps {
                        // Clamp both the data length and the offset so the
                        // segment provably fits in the memory's minimum size
                        // (the `u64::MAX / 64 / 1024` guard avoids overflow
                        // in the pages-to-bytes multiply).
                        let max_size = (u64::MAX / 64 / 1024).min(mem.minimum) * 64 * 1024;
                        init.truncate(max_size as usize);
                        let max_offset = max_size - init.len() as u64;
                        match &mut offset {
                            Offset::Const32(x) => {
                                *x = (*x as u64).min(max_offset) as i32;
                            }
                            Offset::Const64(x) => {
                                *x = (*x as u64).min(max_offset) as i64;
                            }
                            // Global offsets are never generated when traps
                            // are disallowed (see above).
                            Offset::Global(_) => unreachable!(),
                        }
                    }
                    DataSegmentKind::Active {
                        offset,
                        memory_index,
                    }
                };
            self.data.push(DataSegment { kind, init });
            Ok(true)
        },
    )
}
/// Resolves a block type to its parameter and result value types.
fn params_results(&self, ty: &BlockType) -> (Vec<ValType>, Vec<ValType>) {
    match ty {
        BlockType::Empty => (Vec::new(), Vec::new()),
        BlockType::Result(t) => (Vec::new(), vec![*t]),
        BlockType::FunctionType(idx) => {
            let fty = self.func_type(*idx);
            (fty.params.to_vec(), fty.results.to_vec())
        }
    }
}
/// Iterates over the indices of globals that a constant expression of type
/// `ty` may `global.get`: immutable globals whose type is a subtype of `ty`.
///
/// Pre-GC, constant expressions may only reference imported globals; with
/// GC enabled, any global qualifies.
fn globals_for_const_expr(&self, ty: ValType) -> impl Iterator<Item = u32> + '_ {
    let num_imported_globals = self.globals.len() - self.defined_globals.len();
    let limit = if self.config.gc_enabled {
        self.globals.len()
    } else {
        num_imported_globals
    };
    self.globals
        .iter()
        .take(limit)
        .enumerate()
        .filter_map(move |(i, g)| {
            if g.mutable || !self.val_type_is_sub_type(g.val_type, ty) {
                None
            } else {
                Some(i as u32)
            }
        })
}
/// Populates `interesting_values32`/`interesting_values64` with boundary
/// values, bit patterns, float specials, and this module's table/memory
/// limits, for use by `arbitrary_const_instruction`.
///
/// Must be called exactly once (before code generation); the debug asserts
/// guard against double-population.
fn compute_interesting_values(&mut self) {
    debug_assert!(self.interesting_values32.is_empty());
    debug_assert!(self.interesting_values64.is_empty());
    // Sets deduplicate; every 64-bit value also contributes its truncated
    // 32-bit form.
    let mut interesting_values32 = HashSet::new();
    let mut interesting_values64 = HashSet::new();
    let mut interesting = |val: u64| {
        interesting_values32.insert(val as u32);
        interesting_values64.insert(val);
    };
    // Integer boundary values.
    interesting(0);
    interesting(u8::MAX as _);
    interesting(u16::MAX as _);
    interesting(u32::MAX as _);
    interesting(u64::MAX);
    interesting(i8::MIN as _);
    interesting(i16::MIN as _);
    interesting(i32::MIN as _);
    interesting(i64::MIN as _);
    // Single-bit values, their complements, low-bit masks, and sign-extended
    // high-bit runs.
    for i in 0..64 {
        interesting(1 << i);
        interesting(!(1 << i));
        interesting((1 << i) - 1);
        interesting(((1_i64 << 63) >> i) as _);
    }
    // Repeating byte patterns and their complements. (The original list
    // contained `0b00010001` twice; the duplicate was a no-op thanks to the
    // set and has been dropped.)
    for pattern in [0b01010101, 0b00010001, 0b00000001] {
        for b in [pattern, !pattern] {
            interesting(u64::from_ne_bytes([b, b, b, b, b, b, b, b]));
        }
    }
    // Special f64 values, stored via their bit patterns.
    let mut interesting_f64 = |x: f64| interesting(x.to_bits());
    interesting_f64(0.0);
    interesting_f64(-0.0);
    interesting_f64(f64::INFINITY);
    interesting_f64(f64::NEG_INFINITY);
    interesting_f64(f64::EPSILON);
    interesting_f64(-f64::EPSILON);
    interesting_f64(f64::MIN);
    interesting_f64(f64::MIN_POSITIVE);
    interesting_f64(f64::MAX);
    interesting_f64(f64::NAN);
    // Special f32 values, likewise as bits.
    let mut interesting_f32 = |x: f32| interesting(x.to_bits() as _);
    interesting_f32(0.0);
    interesting_f32(-0.0);
    interesting_f32(f32::INFINITY);
    interesting_f32(f32::NEG_INFINITY);
    interesting_f32(f32::EPSILON);
    interesting_f32(-f32::EPSILON);
    interesting_f32(f32::MIN);
    interesting_f32(f32::MIN_POSITIVE);
    interesting_f32(f32::MAX);
    interesting_f32(f32::NAN);
    // Table limits and their off-by-one neighbors.
    for t in self.tables.iter() {
        interesting(t.minimum as _);
        if let Some(x) = t.minimum.checked_add(1) {
            interesting(x as _);
        }
        if let Some(x) = t.maximum {
            interesting(x as _);
            if let Some(y) = x.checked_add(1) {
                interesting(y as _);
            }
        }
    }
    // Memory limits in bytes, plus small power-of-two offsets around them.
    for m in self.memories.iter() {
        let min = m.minimum.saturating_mul(crate::page_size(m).into());
        interesting(min);
        for i in 0..5 {
            if let Some(x) = min.checked_add(1 << i) {
                interesting(x);
            }
            if let Some(x) = min.checked_sub(1 << i) {
                interesting(x);
            }
        }
        if let Some(max) = m.maximum {
            let max = max.saturating_mul(crate::page_size(m).into());
            interesting(max);
            for i in 0..5 {
                if let Some(x) = max.checked_add(1 << i) {
                    interesting(x);
                }
                if let Some(x) = max.checked_sub(1 << i) {
                    interesting(x);
                }
            }
        }
    }
    // Sort for deterministic output regardless of hash iteration order.
    self.interesting_values32.extend(interesting_values32);
    self.interesting_values64.extend(interesting_values64);
    self.interesting_values32.sort();
    self.interesting_values64.sort();
}
/// Emits a single constant instruction producing a value of type `ty`,
/// drawing either from the precomputed "interesting" values or a fully
/// arbitrary one.
///
/// `compute_interesting_values` must have been called first; the debug
/// asserts check this. (`len() > 0` replaced with the idiomatic
/// `!is_empty()`.)
fn arbitrary_const_instruction(
    &self,
    ty: ValType,
    u: &mut Unstructured<'_>,
) -> Result<Instruction> {
    debug_assert!(!self.interesting_values32.is_empty());
    debug_assert!(!self.interesting_values64.is_empty());
    match ty {
        ValType::I32 => Ok(Instruction::I32Const(if u.arbitrary()? {
            *u.choose(&self.interesting_values32)? as i32
        } else {
            u.arbitrary()?
        })),
        ValType::I64 => Ok(Instruction::I64Const(if u.arbitrary()? {
            *u.choose(&self.interesting_values64)? as i64
        } else {
            u.arbitrary()?
        })),
        ValType::F32 => Ok(Instruction::F32Const(if u.arbitrary()? {
            f32::from_bits(*u.choose(&self.interesting_values32)?)
        } else {
            u.arbitrary()?
        })),
        ValType::F64 => Ok(Instruction::F64Const(if u.arbitrary()? {
            f64::from_bits(*u.choose(&self.interesting_values64)?)
        } else {
            u.arbitrary()?
        })),
        ValType::V128 => Ok(Instruction::V128Const(if u.arbitrary()? {
            // Compose a v128 from two independently-chosen 64-bit halves.
            let upper = (*u.choose(&self.interesting_values64)? as i128) << 64;
            let lower = *u.choose(&self.interesting_values64)? as i128;
            upper | lower
        } else {
            u.arbitrary()?
        })),
        ValType::Ref(ty) => {
            // Only nullable refs have a constant instruction (`ref.null`).
            assert!(ty.nullable);
            Ok(Instruction::RefNull(ty.heap_type))
        }
    }
}
/// Runs `f` with `self.must_share` temporarily set to `must_share`,
/// restoring the previous value afterwards.
fn propagate_shared<T>(&mut self, must_share: bool, mut f: impl FnMut(&mut Self) -> T) -> T {
    let saved = mem::replace(&mut self.must_share, must_share);
    let ret = f(self);
    self.must_share = saved;
    ret
}
/// Decides whether a newly-generated item should be shared.
///
/// Forced to `true` inside a shared context; otherwise shared ~25% of the
/// time, and only when shared-everything-threads is enabled.
fn arbitrary_shared(&self, u: &mut Unstructured) -> Result<bool> {
    if self.must_share {
        return Ok(true);
    }
    Ok(self.config.shared_everything_threads_enabled && u.ratio(1, 4)?)
}
/// Whether the given reference type points at a shared heap type.
fn is_shared_ref_type(&self, ty: RefType) -> bool {
    match ty.heap_type {
        HeapType::Abstract { shared, .. } => shared,
        HeapType::Concrete(idx) => self.types[idx as usize].composite_type.shared,
    }
}
/// Whether the type at `index` is declared shared.
///
/// Panics if `index` is out of range.
fn is_shared_type(&self, index: u32) -> bool {
    self.types[index as usize].composite_type.shared
}
}
/// Generates a (minimum, optional maximum) limits pair.
///
/// The minimum is grown gradually from `min_minimum` (biased to stay at or
/// below `max_inbounds`) up to `max_minimum`; a maximum is emitted when
/// `max_required` or by coin flip.
pub(crate) fn arbitrary_limits64(
    u: &mut Unstructured,
    min_minimum: Option<u64>,
    max_minimum: u64,
    max_required: bool,
    max_inbounds: u64,
) -> Result<(u64, Option<u64>)> {
    let floor = min_minimum.unwrap_or(0);
    assert!(floor <= max_minimum, "{floor} <= {max_minimum}");
    assert!(floor <= max_inbounds, "{floor} <= {max_inbounds}");
    let min = gradually_grow(u, floor, max_inbounds, max_minimum)?;
    assert!(min <= max_minimum, "{min} <= {max_minimum}");
    let max = if max_required || u.arbitrary().unwrap_or(false) {
        Some(u.int_in_range(min..=max_minimum)?)
    } else {
        None
    };
    assert!(min <= max.unwrap_or(min), "{min} <= {}", max.unwrap_or(min));
    Ok((min, max))
}
/// Builds the list of value types permitted by the given configuration.
pub(crate) fn configured_valtypes(config: &Config) -> Vec<ValType> {
    // Integers are always available.
    let mut valtypes = vec![ValType::I32, ValType::I64];
    if config.allow_floats {
        valtypes.extend([ValType::F32, ValType::F64]);
    }
    if config.simd_enabled {
        valtypes.push(ValType::V128);
    }
    if config.gc_enabled && config.reference_types_enabled {
        use AbstractHeapType::*;
        // Only nullable abstract refs are generated here.
        let nullable = true;
        let abs_ref_types = [
            Any, Eq, I31, Array, Struct, None, Func, NoFunc, Extern, NoExtern,
        ];
        valtypes.extend(
            abs_ref_types
                .iter()
                .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, false))),
        );
        if config.shared_everything_threads_enabled {
            valtypes.extend(
                abs_ref_types
                    .iter()
                    .map(|&ty| ValType::Ref(RefType::new_abstract(ty, nullable, true))),
            );
        }
    } else if config.reference_types_enabled {
        // Reference types without GC: just the two MVP ref types.
        valtypes.push(ValType::EXTERNREF);
        valtypes.push(ValType::FUNCREF);
    }
    valtypes
}
/// Generates an arbitrary table type.
///
/// When `module` is provided, the element type is drawn from the module's
/// ref types (and sharedness derived from it); otherwise it defaults to
/// `funcref`.
pub(crate) fn arbitrary_table_type(
    u: &mut Unstructured,
    config: &Config,
    module: Option<&Module>,
) -> Result<TableType> {
    // 64-bit tables require the memory64 proposal.
    let table64 = config.memory64_enabled && u.arbitrary()?;
    // With traps disallowed, tables must be non-empty so in-bounds accesses
    // always exist.
    let min_elements = if config.disallow_traps { Some(1) } else { None };
    let max_elements = min_elements.unwrap_or(0).max(config.max_table_elements);
    let max_inbounds = 10_000u64.min(max_elements);
    let (minimum, maximum) = arbitrary_limits64(
        u,
        min_elements,
        max_elements,
        config.table_max_size_required,
        max_inbounds,
    )?;
    if config.disallow_traps {
        assert!(minimum > 0);
    }
    let element_type = match module {
        Some(m) => m.arbitrary_ref_type(u)?,
        None => RefType::FUNCREF,
    };
    let shared = module.map_or(false, |m| m.is_shared_ref_type(element_type));
    Ok(TableType {
        element_type,
        minimum,
        maximum,
        table64,
        shared,
    })
}
/// Generates an arbitrary memory type honoring the configured size limits,
/// threads, memory64, and custom-page-sizes proposals.
pub(crate) fn arbitrary_memtype(u: &mut Unstructured, config: &Config) -> Result<MemoryType> {
    // Shared memories require threads; ~25% of memories are shared when on.
    let shared = config.threads_enabled && u.ratio(1, 4)?;
    let memory64 = config.memory64_enabled && u.arbitrary()?;
    // Custom page sizes: either 1-byte (2^0) or the default 64 KiB (2^16).
    let page_size_log2 = if config.custom_page_sizes_enabled && u.arbitrary()? {
        Some(if u.arbitrary()? { 0 } else { 16 })
    } else {
        None
    };
    // With traps disallowed, memories must have at least one page.
    let min_pages = if config.disallow_traps { Some(1) } else { None };
    // Convert the configured byte limits into page counts, saturating when
    // they overflow the index type. (The former `u64::MAX as u64` was a
    // redundant identity cast.)
    let max_pages = min_pages.unwrap_or(0).max(if memory64 {
        u64::try_from(config.max_memory64_bytes >> page_size_log2.unwrap_or(16))
            .unwrap_or(u64::MAX)
    } else {
        u32::try_from(config.max_memory32_bytes >> page_size_log2.unwrap_or(16))
            .unwrap_or(u32::MAX)
            .into()
    });
    // Bias minimum sizes so all memories together stay within ~1 GiB.
    let max_all_mems_in_bytes = 1 << 30;
    let max_this_mem_in_bytes = max_all_mems_in_bytes / u64::try_from(config.max_memories).unwrap();
    let max_inbounds = max_this_mem_in_bytes >> page_size_log2.unwrap_or(16);
    let max_inbounds = max_inbounds.clamp(min_pages.unwrap_or(0), max_pages);
    let (minimum, maximum) = arbitrary_limits64(
        u,
        min_pages,
        max_pages,
        // Shared memories must declare a maximum.
        config.memory_max_size_required || shared,
        max_inbounds,
    )?;
    Ok(MemoryType {
        minimum,
        maximum,
        memory64,
        shared,
        page_size_log2,
    })
}
/// Generates a tag type by uniformly picking one of the candidate function
/// type indices and resolving it through `get_func_type`.
///
/// Panics if `candidate_func_types` is empty.
pub(crate) fn arbitrary_tag_type(
    u: &mut Unstructured,
    candidate_func_types: &[u32],
    get_func_type: impl FnOnce(u32) -> Rc<FuncType>,
) -> Result<TagType> {
    let pick = u.int_in_range(0..=candidate_func_types.len() - 1)?;
    let func_type_idx = candidate_func_types[pick];
    Ok(TagType {
        func_type_idx,
        func_type: get_func_type(func_type_idx),
    })
}
/// Draws a value in `min..=max`, heavily biased (via a sixth-power curve)
/// towards small values at or below `max_inbounds`, so generated sizes are
/// usually modest but occasionally huge.
fn gradually_grow(u: &mut Unstructured, min: u64, max_inbounds: u64, max: u64) -> Result<u64> {
    if min == max {
        return Ok(min);
    }
    let x = {
        let min = min as f64;
        let max = max as f64;
        let max_inbounds = max_inbounds as f64;
        // Use the full u32 range as the uniform input to the mapping.
        let x = u.arbitrary::<u32>()?;
        let x = f64::from(x);
        let x = map_custom(
            x,
            f64::from(u32::MIN)..f64::from(u32::MAX),
            min..max_inbounds,
            min..max,
        );
        assert!(min <= x, "{min} <= {x}");
        assert!(x <= max, "{x} <= {max}");
        x.round() as u64
    };
    // Clamp to guard against float rounding drifting past the bounds.
    return Ok(x.clamp(min, max));
    /// Maps `value` from `input` into `output`: the first `PCT_INBOUNDS`
    /// fraction of inputs lands in `output_inbounds` (on a steep polynomial
    /// curve favoring its low end); the rest maps linearly across `output`.
    fn map_custom(
        value: f64,
        input: Range<f64>,
        output_inbounds: Range<f64>,
        output: Range<f64>,
    ) -> f64 {
        assert!(!value.is_nan(), "{}", value);
        assert!(value.is_finite(), "{}", value);
        assert!(input.start < input.end, "{} < {}", input.start, input.end);
        assert!(
            output.start < output.end,
            "{} < {}",
            output.start,
            output.end
        );
        assert!(value >= input.start, "{} >= {}", value, input.start);
        assert!(value <= input.end, "{} <= {}", value, input.end);
        // The in-bounds range must nest inside the full output range.
        assert!(
            output.start <= output_inbounds.start,
            "{} <= {}",
            output.start,
            output_inbounds.start
        );
        assert!(
            output_inbounds.end <= output.end,
            "{} <= {}",
            output_inbounds.end,
            output.end
        );
        // Normalize to [0, 1] before choosing a branch.
        let x = map_linear(value, input, 0.0..1.0);
        let result = if x < PCT_INBOUNDS {
            if output_inbounds.start == output_inbounds.end {
                output_inbounds.start
            } else {
                // x^6 concentrates mass near the low end of the in-bounds
                // range.
                let unscaled = x * x * x * x * x * x;
                map_linear(unscaled, 0.0..1.0, output_inbounds)
            }
        } else {
            map_linear(x, 0.0..1.0, output.clone())
        };
        assert!(result >= output.start, "{} >= {}", result, output.start);
        assert!(result <= output.end, "{} <= {}", result, output.end);
        result
    }
    /// Straight linear interpolation of `value` from the input range onto
    /// the output range.
    fn map_linear(
        value: f64,
        Range {
            start: in_low,
            end: in_high,
        }: Range<f64>,
        Range {
            start: out_low,
            end: out_high,
        }: Range<f64>,
    ) -> f64 {
        assert!(!value.is_nan(), "{}", value);
        assert!(value.is_finite(), "{}", value);
        assert!(in_low < in_high, "{} < {}", in_low, in_high);
        assert!(out_low < out_high, "{} < {}", out_low, out_high);
        assert!(value >= in_low, "{} >= {}", value, in_low);
        assert!(value <= in_high, "{} <= {}", value, in_high);
        let dividend = out_high - out_low;
        let divisor = in_high - in_low;
        let slope = dividend / divisor;
        let result = out_low + (slope * (value - in_low));
        assert!(result >= out_low, "{} >= {}", result, out_low);
        assert!(result <= out_high, "{} <= {}", result, out_high);
        result
    }
}
/// Picks an offset for a segment of `segment_size` bytes/elements.
///
/// When the segment can fit below `limit_min`, the offset is biased to keep
/// it in bounds; otherwise any offset up to `limit_max` is chosen.
fn arbitrary_offset(
    u: &mut Unstructured,
    limit_min: u64,
    limit_max: u64,
    segment_size: usize,
) -> Result<u64> {
    let segment_len = u64::try_from(segment_size).unwrap();
    if segment_len > limit_min {
        // Can't fit in bounds anyway: fully arbitrary offset.
        u.int_in_range(0..=limit_max)
    } else {
        gradually_grow(u, 0, limit_min - segment_len, limit_max)
    }
}
/// Draws a (module, field) name pair for an import, each limited to
/// `max_size` bytes.
fn unique_import_strings(max_size: usize, u: &mut Unstructured) -> Result<(String, String)> {
    // Module name first, then field name (tuple fields evaluate in order).
    Ok((limited_string(max_size, u)?, limited_string(max_size, u)?))
}
/// Draws an arbitrary-length byte vector from the remaining input.
fn arbitrary_vec_u8(u: &mut Unstructured) -> Result<Vec<u8>> {
    let len = u.arbitrary_len::<u8>()?;
    let bytes = u.bytes(len)?;
    Ok(bytes.to_vec())
}
impl EntityType {
    /// The abstract "type size" cost charged against the module's
    /// `max_type_size` budget: 1 per entity, plus 1 per function parameter
    /// and result.
    fn size(&self) -> u32 {
        match self {
            EntityType::Func(_, ty) => 1 + (ty.params.len() + ty.results.len()) as u32,
            EntityType::Tag(_)
            | EntityType::Global(_)
            | EntityType::Table(_)
            | EntityType::Memory(_) => 1,
        }
    }
}
/// A set of [`InstructionKind`]s used to restrict which categories of
/// instructions the generator may emit.
#[derive(Clone, Copy, Debug, Default)]
#[cfg_attr(feature = "serde_derive", derive(serde_derive::Deserialize))]
pub struct InstructionKinds(pub(crate) FlagSet<InstructionKind>);
impl InstructionKinds {
    /// Builds a set containing exactly the given kinds.
    pub fn new(kinds: &[InstructionKind]) -> Self {
        let mut set = FlagSet::default();
        for k in kinds {
            set |= *k;
        }
        Self(set)
    }
    /// The set of all instruction kinds.
    pub fn all() -> Self {
        Self(FlagSet::full())
    }
    /// The empty set.
    pub fn none() -> Self {
        Self(FlagSet::default())
    }
    /// Whether `kind` is in the set.
    #[inline]
    pub fn contains(&self, kind: InstructionKind) -> bool {
        self.0.contains(kind)
    }
    /// Returns a copy of this set with every float-capable kind downgraded
    /// to its integer-only counterpart.
    pub fn without_floats(&self) -> Self {
        let mut kinds = self.0;
        let downgrades = [
            (InstructionKind::Numeric, InstructionKind::NumericInt),
            (InstructionKind::Vector, InstructionKind::VectorInt),
            (InstructionKind::Memory, InstructionKind::MemoryInt),
        ];
        for (full, int_only) in downgrades {
            if kinds.contains(full) {
                kinds -= full;
                kinds |= int_only;
            }
        }
        Self(kinds)
    }
}
// Categories of wasm instructions, as a bitflag enum.
//
// NOTE(review): this uses the `_internal_cli` feature for its serde derive
// while `InstructionKinds` above gates on `serde_derive` — presumably
// intentional, but worth confirming the two feature names are both correct.
flags! {
    #[allow(missing_docs)]
    #[cfg_attr(feature = "_internal_cli", derive(serde_derive::Deserialize))]
    pub enum InstructionKind: u16 {
        // The `*Int` kinds are integer-only subsets; the full kinds include
        // the corresponding int bit, so e.g. `Numeric` contains `NumericInt`.
        NumericInt = 1 << 0,
        Numeric = (1 << 1) | (1 << 0),
        VectorInt = 1 << 2,
        Vector = (1 << 3) | (1 << 2),
        Reference = 1 << 4,
        Parametric = 1 << 5,
        Variable = 1 << 6,
        Table = 1 << 7,
        MemoryInt = 1 << 8,
        Memory = (1 << 9) | (1 << 8),
        Control = 1 << 10,
        Aggregate = 1 << 11,
    }
}
impl FromStr for InstructionKinds {
    type Err = String;
    /// Parses a comma-separated list of instruction-kind names (see
    /// [`InstructionKind::from_str`]) into a set.
    fn from_str(s: &str) -> std::prelude::v1::Result<Self, Self::Err> {
        // `split(',')` with a char pattern is cheaper than a `&str` pattern;
        // parsing short-circuits on the first unknown name.
        let kinds = s
            .split(',')
            .map(InstructionKind::from_str)
            .collect::<std::result::Result<Vec<_>, _>>()?;
        Ok(InstructionKinds::new(&kinds))
    }
}
impl FromStr for InstructionKind {
    type Err = String;
    /// Parses a case-insensitive instruction-kind name.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "numeric_non_float" => Ok(InstructionKind::NumericInt),
            "numeric" => Ok(InstructionKind::Numeric),
            "vector_non_float" => Ok(InstructionKind::VectorInt),
            "vector" => Ok(InstructionKind::Vector),
            "reference" => Ok(InstructionKind::Reference),
            "parametric" => Ok(InstructionKind::Parametric),
            "variable" => Ok(InstructionKind::Variable),
            "table" => Ok(InstructionKind::Table),
            "memory_non_float" => Ok(InstructionKind::MemoryInt),
            "memory" => Ok(InstructionKind::Memory),
            "control" => Ok(InstructionKind::Control),
            // Previously missing: the `Aggregate` variant had no string
            // form, so "aggregate" was rejected as unknown.
            "aggregate" => Ok(InstructionKind::Aggregate),
            _ => Err(format!("unknown instruction kind: {}", s)),
        }
    }
}