/// The type of WebAssembly linear memory initialization to use for a module.
pub enum MemoryInitialization {
    /// Memory initialization is segmented: the complete set of data segments
    /// is processed when the module is instantiated. Works for any module and
    /// is the default.
    Segmented(Vec<MemoryInitializer>),
    /// Memory initialization is statically known: one pre-built, page-aligned
    /// image per defined linear memory, suitable for a single `memcpy` or for
    /// mapping directly from a compiled module on disk.
    Static {
        /// The initialization contents for each of the module's own linear
        /// memories; a `None` entry means that memory is zero-filled and
        /// needs no explicit initialization.
        map: PrimaryMap<MemoryIndex, Option<StaticMemoryInitializer>>,
    },
}
The type of WebAssembly linear memory initialization to use for a module.
Variants§
Segmented(Vec<MemoryInitializer>)
Memory initialization is segmented.
Segmented initialization can be used for any module, but it is required if:
- A data segment referenced an imported memory.
- A data segment uses a global base.
Segmented initialization is performed by processing the complete set of data segments when the module is instantiated.
This is the default memory initialization type.
Static
Fields
map: PrimaryMap<MemoryIndex, Option<StaticMemoryInitializer>>
The initialization contents for each linear memory.
This array has, for each of the module's own linear memories, the contents
necessary to initialize it. If the memory has a `None` value then no
initialization is necessary (it's zero-filled). Otherwise with `Some` the
first element of the tuple is the offset in memory at which to start the
initialization, and the `Range` is the range within the final data section
of the compiled module of bytes to copy into the memory.

The offset, range base, and range end are all guaranteed to be page-aligned
to the page size passed in to `try_static_init`.
Memory initialization is statically known and involves a single memcpy
or otherwise simply making the defined data visible.
To be statically initialized everything must reference a defined memory and all data segments have a statically known in-bounds base (no globals).
This form of memory initialization is a more optimized version of
Segmented
where memory can be initialized with one of a few methods:
- First, it could be initialized with a single `memcpy` of data from the
  module to the linear memory.
- Otherwise, techniques like `mmap` are also possible, to make this data —
  which might reside in a compiled module on disk — available immediately in
  a linear memory's address space.
To facilitate the latter of these techniques the try_static_init
function below, which creates this variant, takes a host page size
argument which can page-align everything to make mmap-ing possible.
Implementations§
impl MemoryInitialization

pub fn is_segmented(&self) -> bool

Returns whether this initialization is of the form
`MemoryInitialization::Segmented`.
Examples found in repository?
(Excerpt from lines 214–391 of the repository source, reproduced below.)
/// Attempts to convert a `Segmented` memory initialization into a `Static`
/// one by building a single page-aligned image per defined linear memory.
///
/// `page_size` is the host page size used to align each image so it can be
/// mapped (e.g. `mmap`-ed) directly into a linear memory's address space;
/// `max_image_size_always_allowed` caps how large a sparse image may grow
/// before initialization is deferred to runtime instead.
///
/// This is a no-op if the current initialization isn't `Segmented`, if any
/// segment references an imported memory or needs a global value at compile
/// time, or if the resulting image is deemed too expensive to build.
pub fn try_static_init(&mut self, page_size: u64, max_image_size_always_allowed: u64) {
    // This method only attempts to transform a `Segmented` memory init
    // into a `Static` one, no other state.
    if !self.module.memory_initialization.is_segmented() {
        return;
    }

    // First a dry run of memory initialization is performed. This
    // collects information about the extent of memory initialized for each
    // memory as well as the size of all data segments being copied in.
    struct Memory {
        // Total bytes of segment data copied into this memory.
        data_size: u64,
        // Lowest address written by any segment (`u64::MAX` if none).
        min_addr: u64,
        // One-past-the-highest address written by any segment.
        max_addr: u64,
        // The `usize` here is a pointer into `self.data` which is the list
        // of data segments corresponding to what was found in the original
        // wasm module.
        segments: Vec<(usize, StaticMemoryInitializer)>,
    }
    let mut info = PrimaryMap::with_capacity(self.module.memory_plans.len());
    for _ in 0..self.module.memory_plans.len() {
        info.push(Memory {
            data_size: 0,
            min_addr: u64::MAX,
            max_addr: 0,
            segments: Vec::new(),
        });
    }

    let mut idx = 0;
    let ok = self.module.memory_initialization.init_memory(
        InitMemory::CompileTime(&self.module),
        &mut |memory, init| {
            // Currently `Static` only applies to locally-defined memories,
            // so if a data segment references an imported memory then
            // transitioning to a `Static` memory initializer is not
            // possible.
            if self.module.defined_memory_index(memory).is_none() {
                return false;
            };
            let info = &mut info[memory];
            let data_len = u64::from(init.data.end - init.data.start);
            if data_len > 0 {
                info.data_size += data_len;
                info.min_addr = info.min_addr.min(init.offset);
                info.max_addr = info.max_addr.max(init.offset + data_len);
                info.segments.push((idx, init.clone()));
            }
            idx += 1;
            true
        },
    );
    if !ok {
        return;
    }

    // Validate that the memory information collected is indeed valid for
    // static memory initialization.
    for info in info.values().filter(|i| i.data_size > 0) {
        let image_size = info.max_addr - info.min_addr;

        // If the range of memory being initialized is less than twice the
        // total size of the data itself then it's assumed that static
        // initialization is ok. This means we'll at most double memory
        // consumption during the memory image creation process, which is
        // currently assumed to "probably be ok" but this will likely need
        // tweaks over time.
        if image_size < info.data_size.saturating_mul(2) {
            continue;
        }

        // If the memory initialization image is larger than the size of all
        // data, then we still allow memory initialization if the image will
        // be of a relatively modest size, such as 1MB here.
        if image_size < max_image_size_always_allowed {
            continue;
        }

        // At this point memory initialization is concluded to be too
        // expensive to do at compile time so it's entirely deferred to
        // happen at runtime.
        return;
    }

    // Here's where we've now committed to changing to static memory. The
    // memory initialization image is built here from the page data and then
    // it's converted to a single initializer.
    let data = mem::replace(&mut self.data, Vec::new());
    let mut map = PrimaryMap::with_capacity(info.len());
    let mut module_data_size = 0u32;
    for (memory, info) in info.iter() {
        // Create the in-memory `image` which is the initialized contents of
        // this linear memory.
        let extent = if info.segments.len() > 0 {
            (info.max_addr - info.min_addr) as usize
        } else {
            0
        };
        let mut image = Vec::with_capacity(extent);
        for (idx, init) in info.segments.iter() {
            let data = &data[*idx];
            assert_eq!(data.len(), init.data.len());
            let offset = usize::try_from(init.offset - info.min_addr).unwrap();
            if image.len() < offset {
                // Segment starts past the current end of the image: pad
                // with zeros up to `offset`, then append the segment data.
                image.resize(offset, 0u8);
                image.extend_from_slice(data);
            } else {
                // Segment overlaps already-written bytes: overwrite in
                // place (later segments win, per wasm semantics).
                image.splice(
                    offset..(offset + data.len()).min(image.len()),
                    data.iter().copied(),
                );
            }
        }
        assert_eq!(image.len(), extent);
        assert_eq!(image.capacity(), extent);
        let mut offset = if info.segments.len() > 0 {
            info.min_addr
        } else {
            0
        };

        // Chop off trailing zeros from the image as memory is already
        // zero-initialized. Note that `i` is the position of a nonzero
        // entry here, so to not lose it we truncate to `i + 1`.
        if let Some(i) = image.iter().rposition(|i| *i != 0) {
            image.truncate(i + 1);
        }

        // Also chop off leading zeros, if any.
        if let Some(i) = image.iter().position(|i| *i != 0) {
            offset += i as u64;
            image.drain(..i);
        }
        let mut len = u64::try_from(image.len()).unwrap();

        // The goal is to enable mapping this image directly into memory, so
        // the offset into linear memory must be a multiple of the page
        // size. If that's not already the case then the image is padded at
        // the front and back with extra zeros as necessary
        if offset % page_size != 0 {
            let zero_padding = offset % page_size;
            self.data.push(vec![0; zero_padding as usize].into());
            offset -= zero_padding;
            len += zero_padding;
        }
        self.data.push(image.into());
        if len % page_size != 0 {
            let zero_padding = page_size - (len % page_size);
            self.data.push(vec![0; zero_padding as usize].into());
            len += zero_padding;
        }

        // Offset/length should now always be page-aligned.
        assert!(offset % page_size == 0);
        assert!(len % page_size == 0);

        // Create the `StaticMemoryInitializer` which describes this image,
        // only needed if the image is actually present and has a nonzero
        // length. The `offset` has been calculated above, originally
        // sourced from `info.min_addr`. The `data` field is the extent
        // within the final data segment we'll emit to an ELF image, which
        // is the concatenation of `self.data`, so here it's the size of
        // the section-so-far plus the current segment we're appending.
        let len = u32::try_from(len).unwrap();
        let init = if len > 0 {
            Some(StaticMemoryInitializer {
                offset,
                data: module_data_size..module_data_size + len,
            })
        } else {
            None
        };
        let idx = map.push(init);
        assert_eq!(idx, memory);
        module_data_size += len;
    }
    self.data_align = Some(page_size);
    self.module.memory_initialization = MemoryInitialization::Static { map };
}
pub fn init_memory(
    &self,
    state: InitMemory<'_>,
    write: &mut dyn FnMut(MemoryIndex, &StaticMemoryInitializer) -> bool,
) -> bool

Performs the memory initialization steps for this set of initializers.
This will perform wasm initialization in compliance with the wasm spec and how data segments are processed. This doesn’t need to necessarily only be called as part of initialization, however, as it’s structured to allow learning about memory ahead-of-time at compile time possibly.
The various callbacks provided here are used to drive the smaller bits of initialization, such as:

- `get_cur_size_in_pages` — gets the current size, in wasm pages, of the
  memory specified. For compile-time purposes this would be the memory
  type's minimum size.
- `get_global` — gets the value of the global specified. This is statically,
  via validation, a pointer to a global of the correct type (either u32 or
  u64 depending on the memory), but the value returned here is `u64`. A
  `None` value can be returned to indicate that the global's value isn't
  known yet.
- `write` — a callback used to actually write data. This indicates that the
  specified memory must receive the specified range of data at the specified
  offset. This can internally return `false` if it wants to signal failure.
This function will return `true` if all memory initializers are processed
successfully. If any initializer hits an error or if, for example, a global
value is needed but `None` is returned, then `false` will be returned. At
compile time this typically means that the "error" in question needs to be
deferred to runtime, and at runtime this means that an invalid initializer
has been found and a trap should be generated.
Examples found in repository?
(Excerpt from lines 214–391 of the repository source, reproduced below.)
/// Attempts to convert a `Segmented` memory initialization into a `Static`
/// one by building a single page-aligned image per defined linear memory.
///
/// `page_size` is the host page size used to align each image so it can be
/// mapped (e.g. `mmap`-ed) directly into a linear memory's address space;
/// `max_image_size_always_allowed` caps how large a sparse image may grow
/// before initialization is deferred to runtime instead.
///
/// This is a no-op if the current initialization isn't `Segmented`, if any
/// segment references an imported memory or needs a global value at compile
/// time, or if the resulting image is deemed too expensive to build.
pub fn try_static_init(&mut self, page_size: u64, max_image_size_always_allowed: u64) {
    // This method only attempts to transform a `Segmented` memory init
    // into a `Static` one, no other state.
    if !self.module.memory_initialization.is_segmented() {
        return;
    }

    // First a dry run of memory initialization is performed. This
    // collects information about the extent of memory initialized for each
    // memory as well as the size of all data segments being copied in.
    struct Memory {
        // Total bytes of segment data copied into this memory.
        data_size: u64,
        // Lowest address written by any segment (`u64::MAX` if none).
        min_addr: u64,
        // One-past-the-highest address written by any segment.
        max_addr: u64,
        // The `usize` here is a pointer into `self.data` which is the list
        // of data segments corresponding to what was found in the original
        // wasm module.
        segments: Vec<(usize, StaticMemoryInitializer)>,
    }
    let mut info = PrimaryMap::with_capacity(self.module.memory_plans.len());
    for _ in 0..self.module.memory_plans.len() {
        info.push(Memory {
            data_size: 0,
            min_addr: u64::MAX,
            max_addr: 0,
            segments: Vec::new(),
        });
    }

    let mut idx = 0;
    let ok = self.module.memory_initialization.init_memory(
        InitMemory::CompileTime(&self.module),
        &mut |memory, init| {
            // Currently `Static` only applies to locally-defined memories,
            // so if a data segment references an imported memory then
            // transitioning to a `Static` memory initializer is not
            // possible.
            if self.module.defined_memory_index(memory).is_none() {
                return false;
            };
            let info = &mut info[memory];
            let data_len = u64::from(init.data.end - init.data.start);
            if data_len > 0 {
                info.data_size += data_len;
                info.min_addr = info.min_addr.min(init.offset);
                info.max_addr = info.max_addr.max(init.offset + data_len);
                info.segments.push((idx, init.clone()));
            }
            idx += 1;
            true
        },
    );
    if !ok {
        return;
    }

    // Validate that the memory information collected is indeed valid for
    // static memory initialization.
    for info in info.values().filter(|i| i.data_size > 0) {
        let image_size = info.max_addr - info.min_addr;

        // If the range of memory being initialized is less than twice the
        // total size of the data itself then it's assumed that static
        // initialization is ok. This means we'll at most double memory
        // consumption during the memory image creation process, which is
        // currently assumed to "probably be ok" but this will likely need
        // tweaks over time.
        if image_size < info.data_size.saturating_mul(2) {
            continue;
        }

        // If the memory initialization image is larger than the size of all
        // data, then we still allow memory initialization if the image will
        // be of a relatively modest size, such as 1MB here.
        if image_size < max_image_size_always_allowed {
            continue;
        }

        // At this point memory initialization is concluded to be too
        // expensive to do at compile time so it's entirely deferred to
        // happen at runtime.
        return;
    }

    // Here's where we've now committed to changing to static memory. The
    // memory initialization image is built here from the page data and then
    // it's converted to a single initializer.
    let data = mem::replace(&mut self.data, Vec::new());
    let mut map = PrimaryMap::with_capacity(info.len());
    let mut module_data_size = 0u32;
    for (memory, info) in info.iter() {
        // Create the in-memory `image` which is the initialized contents of
        // this linear memory.
        let extent = if info.segments.len() > 0 {
            (info.max_addr - info.min_addr) as usize
        } else {
            0
        };
        let mut image = Vec::with_capacity(extent);
        for (idx, init) in info.segments.iter() {
            let data = &data[*idx];
            assert_eq!(data.len(), init.data.len());
            let offset = usize::try_from(init.offset - info.min_addr).unwrap();
            if image.len() < offset {
                // Segment starts past the current end of the image: pad
                // with zeros up to `offset`, then append the segment data.
                image.resize(offset, 0u8);
                image.extend_from_slice(data);
            } else {
                // Segment overlaps already-written bytes: overwrite in
                // place (later segments win, per wasm semantics).
                image.splice(
                    offset..(offset + data.len()).min(image.len()),
                    data.iter().copied(),
                );
            }
        }
        assert_eq!(image.len(), extent);
        assert_eq!(image.capacity(), extent);
        let mut offset = if info.segments.len() > 0 {
            info.min_addr
        } else {
            0
        };

        // Chop off trailing zeros from the image as memory is already
        // zero-initialized. Note that `i` is the position of a nonzero
        // entry here, so to not lose it we truncate to `i + 1`.
        if let Some(i) = image.iter().rposition(|i| *i != 0) {
            image.truncate(i + 1);
        }

        // Also chop off leading zeros, if any.
        if let Some(i) = image.iter().position(|i| *i != 0) {
            offset += i as u64;
            image.drain(..i);
        }
        let mut len = u64::try_from(image.len()).unwrap();

        // The goal is to enable mapping this image directly into memory, so
        // the offset into linear memory must be a multiple of the page
        // size. If that's not already the case then the image is padded at
        // the front and back with extra zeros as necessary
        if offset % page_size != 0 {
            let zero_padding = offset % page_size;
            self.data.push(vec![0; zero_padding as usize].into());
            offset -= zero_padding;
            len += zero_padding;
        }
        self.data.push(image.into());
        if len % page_size != 0 {
            let zero_padding = page_size - (len % page_size);
            self.data.push(vec![0; zero_padding as usize].into());
            len += zero_padding;
        }

        // Offset/length should now always be page-aligned.
        assert!(offset % page_size == 0);
        assert!(len % page_size == 0);

        // Create the `StaticMemoryInitializer` which describes this image,
        // only needed if the image is actually present and has a nonzero
        // length. The `offset` has been calculated above, originally
        // sourced from `info.min_addr`. The `data` field is the extent
        // within the final data segment we'll emit to an ELF image, which
        // is the concatenation of `self.data`, so here it's the size of
        // the section-so-far plus the current segment we're appending.
        let len = u32::try_from(len).unwrap();
        let init = if len > 0 {
            Some(StaticMemoryInitializer {
                offset,
                data: module_data_size..module_data_size + len,
            })
        } else {
            None
        };
        let idx = map.push(init);
        assert_eq!(idx, memory);
        module_data_size += len;
    }
    self.data_align = Some(page_size);
    self.module.memory_initialization = MemoryInitialization::Static { map };
}