Compare commits

...

3 Commits

Author SHA1 Message Date
Sergey Pepyakin e8151ed272 Optimize pushing the stack 2019-07-16 18:13:46 +03:00
Sergey Pepyakin 358adb334a Cache bytebuf 2019-07-15 23:28:53 +03:00
Sergey Pepyakin 97661cf3fe CoW 2019-07-15 22:17:25 +03:00
5 changed files with 173 additions and 55 deletions

View File

@ -404,7 +404,7 @@ pub use self::func::{FuncInstance, FuncInvocation, FuncRef, ResumableError};
pub use self::global::{GlobalInstance, GlobalRef}; pub use self::global::{GlobalInstance, GlobalRef};
pub use self::host::{Externals, HostError, NopExternals, RuntimeArgs}; pub use self::host::{Externals, HostError, NopExternals, RuntimeArgs};
pub use self::imports::{ImportResolver, ImportsBuilder, ModuleImportResolver}; pub use self::imports::{ImportResolver, ImportsBuilder, ModuleImportResolver};
pub use self::memory::{MemoryInstance, MemoryRef, LINEAR_MEMORY_PAGE_SIZE}; pub use self::memory::{MemoryBackend, ByteBuf, MemoryInstance, MemoryRef, LINEAR_MEMORY_PAGE_SIZE};
pub use self::module::{ExternVal, ModuleInstance, ModuleRef, NotStartedModuleRef}; pub use self::module::{ExternVal, ModuleInstance, ModuleRef, NotStartedModuleRef};
pub use self::runner::{StackRecycler, DEFAULT_CALL_STACK_LIMIT, DEFAULT_VALUE_STACK_LIMIT}; pub use self::runner::{StackRecycler, DEFAULT_CALL_STACK_LIMIT, DEFAULT_VALUE_STACK_LIMIT};
pub use self::table::{TableInstance, TableRef}; pub use self::table::{TableInstance, TableRef};

View File

@ -7,6 +7,7 @@
use std::ptr::{self, NonNull}; use std::ptr::{self, NonNull};
use std::slice; use std::slice;
use super::{MemoryBackend, ByteBuf};
struct Mmap { struct Mmap {
/// The pointer that points to the start of the mapping. /// The pointer that points to the start of the mapping.
@ -111,11 +112,15 @@ impl Drop for Mmap {
} }
} }
pub struct ByteBuf { pub struct MmapByteBuf {
mmap: Option<Mmap>, mmap: Option<Mmap>,
} }
impl ByteBuf { impl MmapByteBuf {
pub fn empty() -> Self {
MmapByteBuf { mmap: None }
}
pub fn new(len: usize) -> Result<Self, &'static str> { pub fn new(len: usize) -> Result<Self, &'static str> {
let mmap = if len == 0 { let mmap = if len == 0 {
None None
@ -124,8 +129,14 @@ impl ByteBuf {
}; };
Ok(Self { mmap }) Ok(Self { mmap })
} }
}
pub fn realloc(&mut self, new_len: usize) -> Result<(), &'static str> { impl MemoryBackend for MmapByteBuf {
fn alloc(&mut self, initial: usize, _maximum: Option<usize>) -> Result<ByteBuf, &'static str> {
self.realloc(initial)
}
fn realloc(&mut self, new_len: usize) -> Result<ByteBuf, &'static str> {
let new_mmap = if new_len == 0 { let new_mmap = if new_len == 0 {
None None
} else { } else {
@ -139,27 +150,16 @@ impl ByteBuf {
Some(new_mmap) Some(new_mmap)
}; };
let bytebuf = ByteBuf {
ptr: new_mmap.as_ref().map(|m| m.ptr.as_ptr()).unwrap_or(NonNull::dangling().as_ptr()),
len: new_mmap.as_ref().map(|m| m.len).unwrap_or(0),
};
self.mmap = new_mmap; self.mmap = new_mmap;
Ok(()) Ok(bytebuf)
} }
pub fn len(&self) -> usize { fn erase(&mut self) -> Result<(), &'static str> {
self.mmap.as_ref().map(|m| m.len).unwrap_or(0) let len = self.mmap.as_ref().map(|m| m.len).unwrap_or(0);
}
pub fn as_slice(&self) -> &[u8] {
self.mmap.as_ref().map(|m| m.as_slice()).unwrap_or(&[])
}
pub fn as_slice_mut(&mut self) -> &mut [u8] {
self.mmap
.as_mut()
.map(|m| m.as_slice_mut())
.unwrap_or(&mut [])
}
pub fn erase(&mut self) -> Result<(), &'static str> {
let len = self.len();
if len > 0 { if len > 0 {
// The order is important. // The order is important.
// //
@ -176,14 +176,14 @@ impl ByteBuf {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::ByteBuf; use super::{MmapByteBuf, MemoryBackend};
const PAGE_SIZE: usize = 4096; const PAGE_SIZE: usize = 4096;
// This is not required since wasm memories can only grow but nice to have. // This is not required since wasm memories can only grow but nice to have.
#[test] #[test]
fn byte_buf_shrink() { fn byte_buf_shrink() {
let mut byte_buf = ByteBuf::new(PAGE_SIZE * 3).unwrap(); let mut byte_buf = MmapByteBuf::new(PAGE_SIZE * 3).unwrap();
byte_buf.realloc(PAGE_SIZE * 2).unwrap(); byte_buf.realloc(PAGE_SIZE * 2).unwrap();
} }
} }

View File

@ -3,6 +3,7 @@ use core::{
cell::{Cell, RefCell}, cell::{Cell, RefCell},
cmp, fmt, cmp, fmt,
ops::Range, ops::Range,
slice,
u32, u32,
}; };
use memory_units::{Bytes, Pages, RoundUpTo}; use memory_units::{Bytes, Pages, RoundUpTo};
@ -11,14 +12,14 @@ use value::LittleEndianConvert;
use Error; use Error;
#[cfg(all(unix, not(feature = "vec_memory")))] #[cfg(all(unix, not(feature = "vec_memory")))]
#[path = "mmap_bytebuf.rs"] #[path="mmap_bytebuf.rs"]
mod bytebuf; mod mmap_bytebuf;
#[cfg(any(not(unix), feature = "vec_memory"))] #[cfg(all(unix, not(feature = "vec_memory")))]
#[path = "vec_bytebuf.rs"] use self::mmap_bytebuf::MmapByteBuf;
mod bytebuf;
use self::bytebuf::ByteBuf; // #[cfg(any(not(unix), feature = "vec_memory"))]
// mod bytebuf;
/// Size of a page of [linear memory][`MemoryInstance`] - 64KiB. /// Size of a page of [linear memory][`MemoryInstance`] - 64KiB.
/// ///
@ -43,6 +44,35 @@ impl ::core::ops::Deref for MemoryRef {
} }
} }
/// A view into a linear memory buffer: a raw pointer plus a length.
///
/// A `ByteBuf` is a plain `(ptr, len)` pair handed out by a [`MemoryBackend`];
/// it does not own the underlying allocation — the backend does. A view must
/// not be used after the backend reallocates or frees the region.
pub struct ByteBuf {
    /// Pointer to the first byte of the buffer. Must be non-null and aligned
    /// (a dangling pointer is fine for the zero-length case) and valid for
    /// reads and writes of `len` bytes while the backing allocation lives.
    pub ptr: *mut u8,
    /// Length of the buffer in bytes.
    pub len: usize,
}

impl ByteBuf {
    /// Returns the buffer contents as an immutable byte slice.
    pub fn as_slice(&self) -> &[u8] {
        // SAFETY: by the invariants on `ptr`/`len` above, `ptr` is valid for
        // reads of `len` bytes for as long as the owning backend keeps the
        // allocation alive.
        unsafe { slice::from_raw_parts(self.ptr, self.len) }
    }

    /// Returns the buffer contents as a mutable byte slice.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: as in `as_slice`, plus `&mut self` guarantees exclusive
        // access through this view.
        unsafe { slice::from_raw_parts_mut(self.ptr, self.len) }
    }

    /// Returns the length of the buffer in bytes.
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the buffer has zero length.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
}
/// An allocator for the backing storage of a linear memory.
///
/// An implementation owns the underlying allocation and hands out [`ByteBuf`]
/// `(ptr, len)` views into it. Any previously returned view is invalidated by
/// a subsequent `alloc`/`realloc` call.
pub trait MemoryBackend {
/// Allocates the initial buffer of `initial` bytes and returns a view of it.
/// `maximum`, if given, is an upper-bound size hint in bytes (implementations
/// may use it to reserve address space up front, or ignore it).
fn alloc(&mut self, initial: usize, maximum: Option<usize>) -> Result<ByteBuf, &'static str>;
/// Resizes the buffer to `new_len` bytes and returns a view of the new
/// region; earlier views must no longer be used.
fn realloc(&mut self, new_len: usize) -> Result<ByteBuf, &'static str>;
/// Resets the buffer contents to all zeroes, keeping the current length.
fn erase(&mut self) -> Result<(), &'static str>;
}
/// Runtime representation of a linear memory (or `memory` for short). /// Runtime representation of a linear memory (or `memory` for short).
/// ///
/// A memory is a contiguous, mutable array of raw bytes. Wasm code can load and store values /// A memory is a contiguous, mutable array of raw bytes. Wasm code can load and store values
@ -60,7 +90,8 @@ pub struct MemoryInstance {
/// Memory limits. /// Memory limits.
limits: ResizableLimits, limits: ResizableLimits,
/// Linear memory buffer with lazy allocation. /// Linear memory buffer with lazy allocation.
buffer: RefCell<ByteBuf>, backend: RefCell<Box<dyn MemoryBackend>>,
bytebuf: RefCell<ByteBuf>,
initial: Pages, initial: Pages,
current_size: Cell<usize>, current_size: Cell<usize>,
maximum: Option<Pages>, maximum: Option<Pages>,
@ -70,7 +101,7 @@ impl fmt::Debug for MemoryInstance {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("MemoryInstance") f.debug_struct("MemoryInstance")
.field("limits", &self.limits) .field("limits", &self.limits)
.field("buffer.len", &self.buffer.borrow().len()) .field("buffer.len", &self.bytebuf.borrow().len())
.field("maximum", &self.maximum) .field("maximum", &self.maximum)
.field("initial", &self.initial) .field("initial", &self.initial)
.finish() .finish()
@ -142,17 +173,35 @@ impl MemoryInstance {
let limits = ResizableLimits::new(initial.0 as u32, maximum.map(|p| p.0 as u32)); let limits = ResizableLimits::new(initial.0 as u32, maximum.map(|p| p.0 as u32));
let initial_size: Bytes = initial.into(); let initial_size: Bytes = initial.into();
let maximum_size: Option<Bytes> = maximum.map(|m| m.into());
let mut backend = MmapByteBuf::new(initial_size.0).map_err(|err| Error::Memory(err.to_string()))?;
let bytebuf = backend.alloc(
initial_size.0,
maximum_size.map(|m| m.0),
).map_err(|err| Error::Memory(err.to_string()))?;
Ok(MemoryInstance { Ok(MemoryInstance {
limits: limits, limits: limits,
buffer: RefCell::new( backend: RefCell::new(Box::new(backend)),
ByteBuf::new(initial_size.0).map_err(|err| Error::Memory(err.to_string()))?, bytebuf: RefCell::new(bytebuf),
),
initial: initial, initial: initial,
current_size: Cell::new(initial_size.0), current_size: Cell::new(initial_size.0),
maximum: maximum, maximum: maximum,
}) })
} }
/// Replaces the memory backend of this instance with `backend`.
///
/// The new backend is asked to allocate a fresh buffer sized to this
/// instance's *initial* page count (with the maximum passed as a hint), and
/// both the stored backend and the cached `ByteBuf` view are swapped out.
/// NOTE(review): the previous buffer contents are discarded, not copied —
/// presumably callers invoke this before the memory is used; confirm.
///
/// # Panics
///
/// Panics if the new backend fails to allocate the initial buffer
/// (`alloc` result is `unwrap`ped), or if the backend/bytebuf `RefCell`s
/// are already borrowed (e.g. from within `with_direct_access`).
pub fn set_backend(&self, mut backend: Box<dyn MemoryBackend>) {
// Sizes are derived from the instance's configured limits, in bytes.
let initial_size: Bytes = self.initial.into();
let maximum_size: Option<Bytes> = self.maximum.map(|m| m.into());
let bytebuf = backend.alloc(
initial_size.0,
maximum_size.map(|m| m.0),
).unwrap();
// Install the backend first, then the view it produced.
*self.backend.borrow_mut() = backend;
*self.bytebuf.borrow_mut() = bytebuf;
}
/// Return linear memory limits. /// Return linear memory limits.
pub(crate) fn limits(&self) -> &ResizableLimits { pub(crate) fn limits(&self) -> &ResizableLimits {
&self.limits &self.limits
@ -191,12 +240,12 @@ impl MemoryInstance {
/// ); /// );
/// ``` /// ```
pub fn current_size(&self) -> Pages { pub fn current_size(&self) -> Pages {
Bytes(self.buffer.borrow().len()).round_up_to() Bytes(self.bytebuf.borrow().len()).round_up_to()
} }
/// Get value from memory at given offset. /// Get value from memory at given offset.
pub fn get_value<T: LittleEndianConvert>(&self, offset: u32) -> Result<T, Error> { pub fn get_value<T: LittleEndianConvert>(&self, offset: u32) -> Result<T, Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let region = let region =
self.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?; self.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?;
Ok( Ok(
@ -212,7 +261,7 @@ impl MemoryInstance {
/// ///
/// [`get_into`]: #method.get_into /// [`get_into`]: #method.get_into
pub fn get(&self, offset: u32, size: usize) -> Result<Vec<u8>, Error> { pub fn get(&self, offset: u32, size: usize) -> Result<Vec<u8>, Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let region = self.checked_region(&mut buffer, offset as usize, size)?; let region = self.checked_region(&mut buffer, offset as usize, size)?;
Ok(buffer.as_slice_mut()[region.range()].to_vec()) Ok(buffer.as_slice_mut()[region.range()].to_vec())
@ -224,7 +273,7 @@ impl MemoryInstance {
/// ///
/// Returns `Err` if the specified region is out of bounds. /// Returns `Err` if the specified region is out of bounds.
pub fn get_into(&self, offset: u32, target: &mut [u8]) -> Result<(), Error> { pub fn get_into(&self, offset: u32, target: &mut [u8]) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let region = self.checked_region(&mut buffer, offset as usize, target.len())?; let region = self.checked_region(&mut buffer, offset as usize, target.len())?;
target.copy_from_slice(&buffer.as_slice_mut()[region.range()]); target.copy_from_slice(&buffer.as_slice_mut()[region.range()]);
@ -234,7 +283,7 @@ impl MemoryInstance {
/// Copy data in the memory at given offset. /// Copy data in the memory at given offset.
pub fn set(&self, offset: u32, value: &[u8]) -> Result<(), Error> { pub fn set(&self, offset: u32, value: &[u8]) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let range = self let range = self
.checked_region(&mut buffer, offset as usize, value.len())? .checked_region(&mut buffer, offset as usize, value.len())?
.range(); .range();
@ -246,7 +295,7 @@ impl MemoryInstance {
/// Copy value in the memory at given offset. /// Copy value in the memory at given offset.
pub fn set_value<T: LittleEndianConvert>(&self, offset: u32, value: T) -> Result<(), Error> { pub fn set_value<T: LittleEndianConvert>(&self, offset: u32, value: T) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let range = self let range = self
.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())? .checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?
.range(); .range();
@ -284,10 +333,11 @@ impl MemoryInstance {
} }
let new_buffer_length: Bytes = new_size.into(); let new_buffer_length: Bytes = new_size.into();
self.buffer let bytebuf = self.backend
.borrow_mut() .borrow_mut()
.realloc(new_buffer_length.0) .realloc(new_buffer_length.0)
.map_err(|err| Error::Memory(err.to_string()))?; .map_err(|err| Error::Memory(err.to_string()))?;
*self.bytebuf.borrow_mut() = bytebuf;
self.current_size.set(new_buffer_length.0); self.current_size.set(new_buffer_length.0);
@ -382,7 +432,7 @@ impl MemoryInstance {
/// ///
/// Returns `Err` if either of specified regions is out of bounds. /// Returns `Err` if either of specified regions is out of bounds.
pub fn copy(&self, src_offset: usize, dst_offset: usize, len: usize) -> Result<(), Error> { pub fn copy(&self, src_offset: usize, dst_offset: usize, len: usize) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let (read_region, write_region) = let (read_region, write_region) =
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?; self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
@ -415,7 +465,7 @@ impl MemoryInstance {
dst_offset: usize, dst_offset: usize,
len: usize, len: usize,
) -> Result<(), Error> { ) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let (read_region, write_region) = let (read_region, write_region) =
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?; self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
@ -455,8 +505,8 @@ impl MemoryInstance {
// Because memory references point to different memory instances, it is safe to `borrow_mut` // Because memory references point to different memory instances, it is safe to `borrow_mut`
// both buffers at once (modulo `with_direct_access_mut`). // both buffers at once (modulo `with_direct_access_mut`).
let mut src_buffer = src.buffer.borrow_mut(); let mut src_buffer = src.bytebuf.borrow_mut();
let mut dst_buffer = dst.buffer.borrow_mut(); let mut dst_buffer = dst.bytebuf.borrow_mut();
let src_range = src let src_range = src
.checked_region(&mut src_buffer, src_offset, len)? .checked_region(&mut src_buffer, src_offset, len)?
@ -478,7 +528,7 @@ impl MemoryInstance {
/// ///
/// Returns `Err` if the specified region is out of bounds. /// Returns `Err` if the specified region is out of bounds.
pub fn clear(&self, offset: usize, new_val: u8, len: usize) -> Result<(), Error> { pub fn clear(&self, offset: usize, new_val: u8, len: usize) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.bytebuf.borrow_mut();
let range = self.checked_region(&mut buffer, offset, len)?.range(); let range = self.checked_region(&mut buffer, offset, len)?.range();
@ -501,7 +551,7 @@ impl MemoryInstance {
/// ///
/// Might be useful for some optimization shenanigans. /// Might be useful for some optimization shenanigans.
pub fn erase(&self) -> Result<(), Error> { pub fn erase(&self) -> Result<(), Error> {
self.buffer self.backend
.borrow_mut() .borrow_mut()
.erase() .erase()
.map_err(|err| Error::Memory(err.to_string())) .map_err(|err| Error::Memory(err.to_string()))
@ -517,7 +567,7 @@ impl MemoryInstance {
/// [`set`]: #method.get /// [`set`]: #method.get
/// [`clear`]: #method.set /// [`clear`]: #method.set
pub fn with_direct_access<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R { pub fn with_direct_access<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
let buf = self.buffer.borrow(); let buf = self.bytebuf.borrow();
f(buf.as_slice()) f(buf.as_slice())
} }
@ -531,7 +581,7 @@ impl MemoryInstance {
/// [`get`]: #method.get /// [`get`]: #method.get
/// [`set`]: #method.set /// [`set`]: #method.set
pub fn with_direct_access_mut<R, F: FnOnce(&mut [u8]) -> R>(&self, f: F) -> R { pub fn with_direct_access_mut<R, F: FnOnce(&mut [u8]) -> R>(&self, f: F) -> R {
let mut buf = self.buffer.borrow_mut(); let mut buf = self.bytebuf.borrow_mut();
f(buf.as_slice_mut()) f(buf.as_slice_mut())
} }
} }

71
src/memory/raw_bytebuf.rs Normal file
View File

@ -0,0 +1,71 @@
//! An implementation of [`MemoryBackend`] backed by a raw pointer with a
//! fixed capacity: the buffer can shrink and regrow within `cap`, but can
//! never grow beyond it.

use std::{mem, slice};

use super::{ByteBuf, MemoryBackend};

/// A fixed-capacity byte buffer described by raw parts.
///
/// NOTE(review): this type does not implement `Drop`, so memory allocated by
/// [`RawByteBuf::new`] is deliberately leaked (it may also wrap
/// externally-owned memory via [`RawByteBuf::from_raw_parts`]) — confirm the
/// intended ownership story.
pub struct RawByteBuf {
    /// Pointer to the start of the region; valid for `cap` bytes.
    ptr: *mut u8,
    /// Current length in bytes (`len <= cap`).
    len: usize,
    /// Fixed capacity in bytes; `realloc` may never exceed it.
    cap: usize,
}

impl RawByteBuf {
    /// Wraps an externally-managed region.
    ///
    /// The caller must guarantee `ptr` is valid for reads and writes of
    /// `cap` bytes for the lifetime of the returned value, and `len <= cap`.
    pub fn from_raw_parts(ptr: *mut u8, len: usize, cap: usize) -> Self {
        Self { ptr, len, cap }
    }

    /// Allocates a zero-filled region of `len` bytes on the heap.
    pub fn new(len: usize) -> Result<Self, &'static str> {
        let mut v = vec![0u8; len];
        // Use the Vec's real capacity, which may exceed the requested length.
        let cap = v.capacity();
        let ptr = v.as_mut_ptr();
        // Relinquish ownership so the allocation outlives `v`; see the type
        // docs regarding the missing `Drop`.
        mem::forget(v);
        Ok(Self { ptr, len, cap })
    }

    /// Returns a `ByteBuf` view of the current region.
    fn bytebuf(&self) -> ByteBuf {
        ByteBuf {
            ptr: self.ptr,
            len: self.len,
        }
    }

    /// Returns the current contents as a mutable slice.
    fn as_slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: `ptr` is valid for `cap >= len` bytes and `&mut self`
        // guarantees exclusive access.
        unsafe { slice::from_raw_parts_mut(self.ptr, self.len) }
    }
}

impl MemoryBackend for RawByteBuf {
    fn alloc(&mut self, initial: usize, _maximum: Option<usize>) -> Result<ByteBuf, &'static str> {
        // The region is preallocated with a fixed capacity, so allocation is
        // just a resize within it; the maximum hint is ignored.
        self.realloc(initial)
    }

    fn realloc(&mut self, new_len: usize) -> Result<ByteBuf, &'static str> {
        if new_len > self.cap {
            return Err("exceeds cap");
        }
        self.len = new_len;
        Ok(self.bytebuf())
    }

    fn erase(&mut self) -> Result<(), &'static str> {
        // Zero the currently visible portion of the buffer in place.
        for byte in self.as_slice_mut() {
            *byte = 0;
        }
        Ok(())
    }
}

View File

@ -1289,12 +1289,9 @@ impl FunctionContext {
debug_assert!(!self.is_initialized); debug_assert!(!self.is_initialized);
let num_locals = locals.iter().map(|l| l.count() as usize).sum(); let num_locals = locals.iter().map(|l| l.count() as usize).sum();
let locals = vec![Default::default(); num_locals]; for _ in 0..num_locals {
// TODO: Replace with extend.
for local in locals {
value_stack value_stack
.push(local) .push(Default::default())
.map_err(|_| TrapKind::StackOverflow)?; .map_err(|_| TrapKind::StackOverflow)?;
} }