Compare commits


15 Commits

Author | SHA1 | Message | Date

Cadey Ratio | 0b6dd64219 | fix no_std building | 2019-10-24 10:29:04 -04:00
adam-rhebo | d2ea44e37c | Avoid temporary allocations during function context initialization (#217) | 2019-10-22 17:23:25 +02:00
  * Avoid temporary allocation when push locals during function invocation.
  * Extend value stack for all locals at once.
Sergei Pepyakin | f19e1c27fc | Fix tiny_keccak (#215) | 2019-09-28 19:05:17 +02:00
Sergei Pepyakin | 59ab1c8d78 | Don't use `cache: cargo` in Travis CI's config (#213) | 2019-09-26 14:13:29 +02:00
Sergei Pepyakin | e6bdaf76f6 | Bump wabt up to 0.9. (#212) | 2019-09-26 13:18:57 +02:00
Pierre Krieger | 390f4b2c4a | Use a Cow for the resumable parameters (#210) | 2019-09-09 12:34:49 +02:00
  * Use a Cow for the resumable parameters
  * Try fixing tests
Sergei Pepyakin | 08c09adbf2 | Bump wasmi-validation (#209) | 2019-09-05 23:49:30 +02:00
Sergei Pepyakin | 990e6698cb | Bump wasmi (#208) | 2019-09-05 23:26:48 +02:00
DemiMarie-parity | 7b1e5820c3 | Update parity-wasm (#207) | 2019-09-05 22:59:10 +02:00
thiolliere | 9d998c7289 | Update README.md (#205) | 2019-08-27 22:20:17 +02:00
Sergei Pepyakin | b1ea069c4a | Update parity-wasm (#198) | 2019-07-17 14:24:36 +03:00
Sergei Pepyakin | b67af25899 | Apply cargo-fix on wasmi (#191) | 2019-07-10 17:45:06 +03:00
  * cargo-fix wasmi
  * fmt
  * Remove allow_failures
  * Add dyn in benches
  * Fix nightly
NikVolf | 57cc6c6a3d | bump to 0.5 | 2019-07-09 18:45:31 +03:00
Sergey Pepyakin | 1bf3cbe5d0 | Bump version to 0.4.6. | 2019-07-09 15:55:24 +02:00
Sergei Pepyakin | 1a6e5b30de | Use mmap for allocation (#190) | 2019-07-08 13:53:04 +02:00
31 changed files with 451 additions and 236 deletions

.gitignore

@ -3,3 +3,4 @@
**/*.rs.bk **/*.rs.bk
Cargo.lock Cargo.lock
spec/target spec/target
.idea


@ -26,6 +26,8 @@ script:
- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo check --benches --manifest-path=benches/Cargo.toml; fi - if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo check --benches --manifest-path=benches/Cargo.toml; fi
# Make sure `no_std` version checks. # Make sure `no_std` version checks.
- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo +nightly check --no-default-features --features core; fi - if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo +nightly check --no-default-features --features core; fi
# Check that `vec_memory` feature works.
- cargo check --features vec_memory
- travis_wait 60 ./test.sh - travis_wait 60 ./test.sh
- ./doc.sh - ./doc.sh
@ -38,7 +40,17 @@ after_success: |
ghp-import -n target/doc && ghp-import -n target/doc &&
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
cache: cargo cache:
# Don't use `cache: cargo` since it adds the `target` directory and that can be huge.
# Saving and loading this directory dwarfs the actual compilation and test times. More
# importantly, Travis times out the build since the job doesn't produce any output for more
# than 10 minutes.
#
# So we just cache the ~/.cargo directory.
directories:
- /home/travis/.cargo
before_cache: before_cache:
# Travis can't cache files that are not readable by "others" # Travis can't cache files that are not readable by "others"
- chmod -R a+r $HOME/.cargo - chmod -R a+r $HOME/.cargo
# According to the Travis CI docs for building Rust projects, this is done by:
- rm -rf /home/travis/.cargo/registry


@ -1,6 +1,6 @@
[package] [package]
name = "wasmi" name = "wasmi"
version = "0.4.6" version = "0.5.1"
authors = ["Nikolay Volf <nikvolf@gmail.com>", "Svyatoslav Nikolsky <svyatonik@yandex.ru>", "Sergey Pepyakin <s.pepyakin@gmail.com>"] authors = ["Nikolay Volf <nikvolf@gmail.com>", "Svyatoslav Nikolsky <svyatonik@yandex.ru>", "Sergey Pepyakin <s.pepyakin@gmail.com>"]
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
readme = "README.md" readme = "README.md"
@ -11,17 +11,17 @@ keywords = ["wasm", "webassembly", "bytecode", "interpreter"]
exclude = [ "/res/*", "/tests/*", "/fuzz/*", "/benches/*" ] exclude = [ "/res/*", "/tests/*", "/fuzz/*", "/benches/*" ]
[dependencies] [dependencies]
wasmi-validation = { version = "0.1", path = "validation", default-features = false } wasmi-validation = { version = "0.2", path = "validation", default-features = false }
parity-wasm = { version = "0.31", default-features = false } parity-wasm = { version = "0.40.1", default-features = false }
memory_units = "0.3.0" memory_units = "0.3.0"
libm = { version = "0.1.2", optional = true } libm = { version = "0.1.2", optional = true }
num-rational = "0.2.2" num-rational = { version = "0.2.2", default-features = false }
num-traits = "0.2.8" num-traits = { version = "0.2.8", default-features = false }
[dev-dependencies] [dev-dependencies]
assert_matches = "1.1" assert_matches = "1.1"
rand = "0.4.2" rand = "0.4.2"
wabt = "0.6" wabt = "0.9"
[features] [features]
default = ["std"] default = ["std"]
@ -30,13 +30,22 @@ std = [
"parity-wasm/std", "parity-wasm/std",
"wasmi-validation/std", "wasmi-validation/std",
"num-rational/std", "num-rational/std",
"num-rational/bigint-std",
"num-traits/std" "num-traits/std"
] ]
# Enable for no_std support # Enable for no_std support
core = [ core = [
# `core` doesn't support the mmap-based memory, so fall back to `vec_memory`.
"vec_memory",
"wasmi-validation/core", "wasmi-validation/core",
"libm" "libm"
] ]
# Enforce using the linear memory implementation based on `Vec` instead of
# mmap on unix systems.
#
# Useful for tests and if you need to minimize unsafe usage at the cost of performance on some
# workloads.
vec_memory = []
[workspace] [workspace]
members = ["validation"] members = ["validation"]


@ -26,8 +26,8 @@ This crate supports `no_std` environments.
Enable the `core` feature and disable default features: Enable the `core` feature and disable default features:
```toml ```toml
[dependencies] [dependencies]
parity-wasm = { wasmi = {
version = "0.31", version = "*",
default-features = false, default-features = false,
features = "core" features = "core"
} }


@ -6,7 +6,7 @@ authors = ["Sergey Pepyakin <s.pepyakin@gmail.com>"]
[dependencies] [dependencies]
wasmi = { path = ".." } wasmi = { path = ".." }
assert_matches = "1.2" assert_matches = "1.2"
wabt = "0.6" wabt = "0.9"
[profile.bench] [profile.bench]
debug = true debug = true


@ -13,7 +13,7 @@ use wasmi::{ImportsBuilder, Module, ModuleInstance, NopExternals, RuntimeValue};
use test::Bencher; use test::Bencher;
// Load a module from a file. // Load a module from a file.
fn load_from_file(filename: &str) -> Result<Module, Box<error::Error>> { fn load_from_file(filename: &str) -> Result<Module, Box<dyn error::Error>> {
use std::io::prelude::*; use std::io::prelude::*;
let mut file = File::open(filename)?; let mut file = File::open(filename)?;
let mut buf = Vec::new(); let mut buf = Vec::new();


@ -33,7 +33,7 @@ pub extern "C" fn prepare_tiny_keccak() -> *const TinyKeccakTestData {
} }
#[no_mangle] #[no_mangle]
pub extern "C" fn bench_tiny_keccak(test_data: *const TinyKeccakTestData) { pub extern "C" fn bench_tiny_keccak(test_data: *mut TinyKeccakTestData) {
unsafe { unsafe {
let mut keccak = Keccak::new_keccak256(); let mut keccak = Keccak::new_keccak256();
keccak.update((*test_data).data); keccak.update((*test_data).data);


@ -10,7 +10,7 @@ cargo-fuzz = true
[dependencies] [dependencies]
wasmi = { path = ".." } wasmi = { path = ".." }
wabt = "0.6.0" wabt = "0.9"
wasmparser = "0.14.1" wasmparser = "0.14.1"
tempdir = "0.3.6" tempdir = "0.3.6"


@ -7,4 +7,4 @@ authors = ["Sergey Pepyakin <s.pepyakin@gmail.com>"]
honggfuzz = "=0.5.9" # Strict equal since hfuzz requires dep and cmd versions to match. honggfuzz = "=0.5.9" # Strict equal since hfuzz requires dep and cmd versions to match.
wasmi = { path = ".." } wasmi = { path = ".." }
tempdir = "0.3.6" tempdir = "0.3.6"
wabt = "0.6.0" wabt = "0.9"


@ -1,6 +1,8 @@
#[allow(unused_imports)] use alloc::{
use alloc::prelude::v1::*; borrow::Cow,
use alloc::rc::{Rc, Weak}; rc::{Rc, Weak},
vec::Vec,
};
use core::fmt; use core::fmt;
use host::Externals; use host::Externals;
use isa; use isa;
@ -194,12 +196,13 @@ impl FuncInstance {
/// [`resume_execution`]: struct.FuncInvocation.html#method.resume_execution /// [`resume_execution`]: struct.FuncInvocation.html#method.resume_execution
pub fn invoke_resumable<'args>( pub fn invoke_resumable<'args>(
func: &FuncRef, func: &FuncRef,
args: &'args [RuntimeValue], args: impl Into<Cow<'args, [RuntimeValue]>>,
) -> Result<FuncInvocation<'args>, Trap> { ) -> Result<FuncInvocation<'args>, Trap> {
let args = args.into();
check_function_args(func.signature(), &args)?; check_function_args(func.signature(), &args)?;
match *func.as_internal() { match *func.as_internal() {
FuncInstanceInternal::Internal { .. } => { FuncInstanceInternal::Internal { .. } => {
let interpreter = Interpreter::new(func, args, None)?; let interpreter = Interpreter::new(func, &*args, None)?;
Ok(FuncInvocation { Ok(FuncInvocation {
kind: FuncInvocationKind::Internal(interpreter), kind: FuncInvocationKind::Internal(interpreter),
}) })
@ -256,7 +259,7 @@ pub struct FuncInvocation<'args> {
enum FuncInvocationKind<'args> { enum FuncInvocationKind<'args> {
Internal(Interpreter), Internal(Interpreter),
Host { Host {
args: &'args [RuntimeValue], args: Cow<'args, [RuntimeValue]>,
host_func_index: usize, host_func_index: usize,
finished: bool, finished: bool,
}, },
@ -303,7 +306,7 @@ impl<'args> FuncInvocation<'args> {
return Err(ResumableError::AlreadyStarted); return Err(ResumableError::AlreadyStarted);
} }
*finished = true; *finished = true;
Ok(externals.invoke_index(*host_func_index, args.clone().into())?) Ok(externals.invoke_index(*host_func_index, args.as_ref().into())?)
} }
} }
} }
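For context on the `Cow` change above: `invoke_resumable` now accepts anything convertible into `Cow<'args, [RuntimeValue]>`, so callers can pass either a borrowed slice or an owned `Vec` of arguments. A minimal sketch against the 0.5 public API; the `start_invocations` helper is hypothetical and not part of this diff:

```rust
use wasmi::{FuncInstance, FuncRef, RuntimeValue, Trap};

// Hypothetical helper showing that both argument forms satisfy the new bound.
fn start_invocations(func: &FuncRef) -> Result<(), Trap> {
    // Borrowed slice: stored as `Cow::Borrowed`, no allocation for the argument list.
    let args = [RuntimeValue::I32(1)];
    let _borrowed = FuncInstance::invoke_resumable(func, &args[..])?;

    // Owned `Vec`: moved in as `Cow::Owned`.
    let _owned = FuncInstance::invoke_resumable(func, vec![RuntimeValue::I32(1)])?;
    Ok(())
}
```

The tests further down pass `&[][..]` for the same reason: an empty array reference doesn't coerce to a slice through the generic parameter, so the slice conversion is spelled out.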


@ -114,11 +114,11 @@ pub trait HostError: 'static + ::core::fmt::Display + ::core::fmt::Debug + Send
} }
} }
impl HostError { impl dyn HostError {
/// Attempt to downcast this `HostError` to a concrete type by reference. /// Attempt to downcast this `HostError` to a concrete type by reference.
pub fn downcast_ref<T: HostError>(&self) -> Option<&T> { pub fn downcast_ref<T: HostError>(&self) -> Option<&T> {
if self.__private_get_type_id__() == TypeId::of::<T>() { if self.__private_get_type_id__() == TypeId::of::<T>() {
unsafe { Some(&*(self as *const HostError as *const T)) } unsafe { Some(&*(self as *const dyn HostError as *const T)) }
} else { } else {
None None
} }
@ -128,7 +128,7 @@ impl HostError {
/// reference. /// reference.
pub fn downcast_mut<T: HostError>(&mut self) -> Option<&mut T> { pub fn downcast_mut<T: HostError>(&mut self) -> Option<&mut T> {
if self.__private_get_type_id__() == TypeId::of::<T>() { if self.__private_get_type_id__() == TypeId::of::<T>() {
unsafe { Some(&mut *(self as *mut HostError as *mut T)) } unsafe { Some(&mut *(self as *mut dyn HostError as *mut T)) }
} else { } else {
None None
} }
@ -257,5 +257,5 @@ mod tests {
} }
// Tests that `HostError` trait is object safe. // Tests that `HostError` trait is object safe.
fn _host_error_is_object_safe(_: &HostError) {} fn _host_error_is_object_safe(_: &dyn HostError) {}
} }
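For context on the `impl dyn HostError` change: downcasting looks the same from the embedder's side, it simply goes through the trait-object type explicitly now. A minimal sketch assuming only the public `HostError` trait; `MyHostError` and `error_code` are invented for illustration:

```rust
use std::fmt;
use wasmi::HostError;

// Hypothetical embedder-defined error type.
#[derive(Debug)]
struct MyHostError(u32);

impl fmt::Display for MyHostError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "my host error: {}", self.0)
    }
}

// `HostError` only requires `'static + Display + Debug + Send + Sync`.
impl HostError for MyHostError {}

// Given a trait object, recover the concrete type via the inherent method above.
fn error_code(err: &dyn HostError) -> Option<u32> {
    err.downcast_ref::<MyHostError>().map(|e| e.0)
}
```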


@ -1,7 +1,4 @@
#[allow(unused_imports)] use alloc::{collections::BTreeMap, string::String};
use alloc::prelude::v1::*;
use alloc::collections::BTreeMap;
use func::FuncRef; use func::FuncRef;
use global::GlobalRef; use global::GlobalRef;
@ -103,7 +100,7 @@ pub trait ImportResolver {
/// [`ImportResolver`]: trait.ImportResolver.html /// [`ImportResolver`]: trait.ImportResolver.html
/// [`ModuleImportResolver`]: trait.ModuleImportResolver.html /// [`ModuleImportResolver`]: trait.ModuleImportResolver.html
pub struct ImportsBuilder<'a> { pub struct ImportsBuilder<'a> {
modules: BTreeMap<String, &'a ModuleImportResolver>, modules: BTreeMap<String, &'a dyn ModuleImportResolver>,
} }
impl<'a> Default for ImportsBuilder<'a> { impl<'a> Default for ImportsBuilder<'a> {
@ -124,7 +121,7 @@ impl<'a> ImportsBuilder<'a> {
pub fn with_resolver<N: Into<String>>( pub fn with_resolver<N: Into<String>>(
mut self, mut self,
name: N, name: N,
resolver: &'a ModuleImportResolver, resolver: &'a dyn ModuleImportResolver,
) -> Self { ) -> Self {
self.modules.insert(name.into(), resolver); self.modules.insert(name.into(), resolver);
self self
@ -133,11 +130,15 @@ impl<'a> ImportsBuilder<'a> {
/// Register an resolver by a name. /// Register an resolver by a name.
/// ///
/// Mutable borrowed version. /// Mutable borrowed version.
pub fn push_resolver<N: Into<String>>(&mut self, name: N, resolver: &'a ModuleImportResolver) { pub fn push_resolver<N: Into<String>>(
&mut self,
name: N,
resolver: &'a dyn ModuleImportResolver,
) {
self.modules.insert(name.into(), resolver); self.modules.insert(name.into(), resolver);
} }
fn resolver(&self, name: &str) -> Option<&ModuleImportResolver> { fn resolver(&self, name: &str) -> Option<&dyn ModuleImportResolver> {
self.modules.get(name).cloned() self.modules.get(name).cloned()
} }
} }
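A short sketch of how the `&dyn ModuleImportResolver` borrows look from the caller's side; `EnvResolver` is an invented placeholder that leans on the trait's default method implementations:

```rust
use wasmi::{ImportsBuilder, ModuleImportResolver};

// Hypothetical resolver; the default trait methods reject every import, which is
// enough to demonstrate storing it behind `&dyn ModuleImportResolver`.
struct EnvResolver;
impl ModuleImportResolver for EnvResolver {}

fn build_imports<'a>(resolver: &'a EnvResolver) -> ImportsBuilder<'a> {
    // The concrete reference coerces to `&'a dyn ModuleImportResolver` here.
    ImportsBuilder::default().with_resolver("env", resolver)
}
```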


@ -67,8 +67,7 @@
//! - Reserved immediates are ignored for `call_indirect`, `current_memory`, `grow_memory`. //! - Reserved immediates are ignored for `call_indirect`, `current_memory`, `grow_memory`.
//! //!
#[allow(unused_imports)] use alloc::vec::Vec;
use alloc::prelude::v1::*;
/// Should we keep a value before "discarding" a stack frame? /// Should we keep a value before "discarding" a stack frame?
/// ///


@ -96,8 +96,6 @@
#![warn(missing_docs)] #![warn(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), no_std)]
//// alloc is required in no_std
#![cfg_attr(not(feature = "std"), feature(alloc, alloc_prelude))]
#[cfg(not(feature = "std"))] #[cfg(not(feature = "std"))]
#[macro_use] #[macro_use]
@ -119,8 +117,11 @@ extern crate parity_wasm;
extern crate wasmi_validation as validation; extern crate wasmi_validation as validation;
#[allow(unused_imports)] use alloc::{
use alloc::prelude::v1::*; boxed::Box,
string::{String, ToString},
vec::Vec,
};
use core::fmt; use core::fmt;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error; use std::error;
@ -239,7 +240,7 @@ pub enum TrapKind {
/// Typically returned from an implementation of [`Externals`]. /// Typically returned from an implementation of [`Externals`].
/// ///
/// [`Externals`]: trait.Externals.html /// [`Externals`]: trait.Externals.html
Host(Box<host::HostError>), Host(Box<dyn host::HostError>),
} }
impl TrapKind { impl TrapKind {
@ -273,7 +274,7 @@ pub enum Error {
/// Trap. /// Trap.
Trap(Trap), Trap(Trap),
/// Custom embedder error. /// Custom embedder error.
Host(Box<host::HostError>), Host(Box<dyn host::HostError>),
} }
impl Error { impl Error {
@ -285,7 +286,7 @@ impl Error {
/// [`Host`]: enum.Error.html#variant.Host /// [`Host`]: enum.Error.html#variant.Host
/// [`Trap`]: enum.Error.html#variant.Trap /// [`Trap`]: enum.Error.html#variant.Trap
/// [`TrapKind::Host`]: enum.TrapKind.html#variant.Host /// [`TrapKind::Host`]: enum.TrapKind.html#variant.Host
pub fn as_host_error(&self) -> Option<&host::HostError> { pub fn as_host_error(&self) -> Option<&dyn host::HostError> {
match *self { match *self {
Error::Host(ref host_err) => Some(&**host_err), Error::Host(ref host_err) => Some(&**host_err),
Error::Trap(ref trap) => match *trap.kind() { Error::Trap(ref trap) => match *trap.kind() {

src/memory/mmap_bytebuf.rs (new file, 189 lines)

@ -0,0 +1,189 @@
//! An implementation of a `ByteBuf` based on virtual memory.
//!
//! This implementation uses `mmap` on POSIX systems (and should use `VirtualAlloc` on Windows).
//! There is room to improve performance for the reallocating case by reserving
//! memory up to the maximum. This might be a problem for systems that don't have a lot of virtual
//! memory (e.g. 32-bit platforms).
use std::ptr::{self, NonNull};
use std::slice;
struct Mmap {
/// The pointer that points to the start of the mapping.
///
/// This value doesn't change after creation.
ptr: NonNull<u8>,
/// The length of this mapping.
///
/// Cannot be more than `isize::max_value()`. This value doesn't change after creation.
len: usize,
}
impl Mmap {
/// Create a new mmap mapping.
///
/// Returns `Err` if:
/// - `len` exceeds `isize::max_value()`,
/// - `len` is 0,
/// - `mmap` returns an error (which almost certainly means out of memory).
fn new(len: usize) -> Result<Self, &'static str> {
if len > isize::max_value() as usize {
return Err("`len` should not exceed `isize::max_value()`");
}
if len == 0 {
return Err("`len` should be greater than 0");
}
let ptr_or_err = unsafe {
// Safety Proof:
// No specific safety proof is required for this call, since the call
// by itself can't cause any safety problems (however, misusing its result can).
libc::mmap(
// `addr` - let the system choose the address at which to create the mapping.
ptr::null_mut(),
// the length of the mapping in bytes.
len,
// `prot` - protection flags: READ WRITE !EXECUTE
libc::PROT_READ | libc::PROT_WRITE,
// `flags`
// `MAP_ANON` - mapping is not backed by any file and initial contents are
// initialized to zero.
// `MAP_PRIVATE` - the mapping is private to this process.
libc::MAP_ANON | libc::MAP_PRIVATE,
// `fildes` - a file descriptor. Pass -1 as this is required for some platforms
// when `MAP_ANON` is passed.
-1,
// `offset` - offset from the file.
0,
)
};
match ptr_or_err {
// With the current parameters, the error can only be returned in case of insufficient
// memory.
libc::MAP_FAILED => Err("mmap returned an error"),
_ => {
let ptr = NonNull::new(ptr_or_err as *mut u8).ok_or("mmap returned 0")?;
Ok(Self { ptr, len })
}
}
}
fn as_slice(&self) -> &[u8] {
unsafe {
// Safety Proof:
// - Aliasing guarantees of `self.ptr` are not violated since `self` is the only owner.
// - This pointer was allocated for `self.len` bytes and thus is a valid slice.
// - `self.len` doesn't change throughout the lifetime of `self`.
// - The returned value is valid for the duration of the lifetime of `self`.
// `self` cannot be destroyed while the returned slice is alive.
// - `self.ptr` is of `NonNull` type and thus `.as_ptr()` can never return NULL.
// - `self.len` cannot be larger than `isize::max_value()`.
slice::from_raw_parts(self.ptr.as_ptr(), self.len)
}
}
fn as_slice_mut(&mut self) -> &mut [u8] {
unsafe {
// Safety Proof:
// - See the proof for `Self::as_slice`
// - Additionally, it is not possible to obtain two mutable references for `self.ptr`
slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len)
}
}
}
impl Drop for Mmap {
fn drop(&mut self) {
let ret_val = unsafe {
// Safety proof:
// - `self.ptr` was allocated by a call to `mmap`.
// - `self.len` was saved at the same time and it doesn't change throughout the lifetime
// of `self`.
libc::munmap(self.ptr.as_ptr() as *mut libc::c_void, self.len)
};
// There is no reason for `munmap` to fail to deallocate a private anonymous mapping
// allocated by `mmap`.
// However, if it does fail, prefer to panic in order not to leak
// and eventually exhaust the virtual memory.
assert_eq!(ret_val, 0, "munmap failed");
}
}
pub struct ByteBuf {
mmap: Option<Mmap>,
}
impl ByteBuf {
pub fn new(len: usize) -> Result<Self, &'static str> {
let mmap = if len == 0 {
None
} else {
Some(Mmap::new(len)?)
};
Ok(Self { mmap })
}
pub fn realloc(&mut self, new_len: usize) -> Result<(), &'static str> {
let new_mmap = if new_len == 0 {
None
} else {
let mut new_mmap = Mmap::new(new_len)?;
if let Some(cur_mmap) = self.mmap.take() {
let src = cur_mmap.as_slice();
let dst = new_mmap.as_slice_mut();
let amount = src.len().min(dst.len());
dst[..amount].copy_from_slice(&src[..amount]);
}
Some(new_mmap)
};
self.mmap = new_mmap;
Ok(())
}
pub fn len(&self) -> usize {
self.mmap.as_ref().map(|m| m.len).unwrap_or(0)
}
pub fn as_slice(&self) -> &[u8] {
self.mmap.as_ref().map(|m| m.as_slice()).unwrap_or(&[])
}
pub fn as_slice_mut(&mut self) -> &mut [u8] {
self.mmap
.as_mut()
.map(|m| m.as_slice_mut())
.unwrap_or(&mut [])
}
pub fn erase(&mut self) -> Result<(), &'static str> {
let len = self.len();
if len > 0 {
// The order is important.
//
// 1. First we clear, and thus drop, the current mmap if any.
// 2. And then we create a new one.
//
// Otherwise we double the peak memory consumption.
self.mmap = None;
self.mmap = Some(Mmap::new(len)?);
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::ByteBuf;
const PAGE_SIZE: usize = 4096;
// This is not required since wasm memories can only grow, but it's nice to have.
#[test]
fn byte_buf_shrink() {
let mut byte_buf = ByteBuf::new(PAGE_SIZE * 3).unwrap();
byte_buf.realloc(PAGE_SIZE * 2).unwrap();
}
}


@ -1,6 +1,4 @@
#[allow(unused_imports)] use alloc::{rc::Rc, string::ToString, vec::Vec};
use alloc::prelude::v1::*;
use alloc::rc::Rc;
use core::{ use core::{
cell::{Cell, RefCell}, cell::{Cell, RefCell},
cmp, fmt, cmp, fmt,
@ -12,6 +10,16 @@ use parity_wasm::elements::ResizableLimits;
use value::LittleEndianConvert; use value::LittleEndianConvert;
use Error; use Error;
#[cfg(all(unix, not(feature = "vec_memory")))]
#[path = "mmap_bytebuf.rs"]
mod bytebuf;
#[cfg(any(not(unix), feature = "vec_memory"))]
#[path = "vec_bytebuf.rs"]
mod bytebuf;
use self::bytebuf::ByteBuf;
/// Size of a page of [linear memory][`MemoryInstance`] - 64KiB. /// Size of a page of [linear memory][`MemoryInstance`] - 64KiB.
/// ///
/// The size of a memory is always a integer multiple of a page size. /// The size of a memory is always a integer multiple of a page size.
@ -52,11 +60,10 @@ pub struct MemoryInstance {
/// Memory limits. /// Memory limits.
limits: ResizableLimits, limits: ResizableLimits,
/// Linear memory buffer with lazy allocation. /// Linear memory buffer with lazy allocation.
buffer: RefCell<Vec<u8>>, buffer: RefCell<ByteBuf>,
initial: Pages, initial: Pages,
current_size: Cell<usize>, current_size: Cell<usize>,
maximum: Option<Pages>, maximum: Option<Pages>,
lowest_used: Cell<u32>,
} }
impl fmt::Debug for MemoryInstance { impl fmt::Debug for MemoryInstance {
@ -126,23 +133,24 @@ impl MemoryInstance {
validation::validate_memory(initial_u32, maximum_u32).map_err(Error::Memory)?; validation::validate_memory(initial_u32, maximum_u32).map_err(Error::Memory)?;
} }
let memory = MemoryInstance::new(initial, maximum); let memory = MemoryInstance::new(initial, maximum)?;
Ok(MemoryRef(Rc::new(memory))) Ok(MemoryRef(Rc::new(memory)))
} }
/// Create new linear memory instance. /// Create new linear memory instance.
fn new(initial: Pages, maximum: Option<Pages>) -> Self { fn new(initial: Pages, maximum: Option<Pages>) -> Result<Self, Error> {
let limits = ResizableLimits::new(initial.0 as u32, maximum.map(|p| p.0 as u32)); let limits = ResizableLimits::new(initial.0 as u32, maximum.map(|p| p.0 as u32));
let initial_size: Bytes = initial.into(); let initial_size: Bytes = initial.into();
MemoryInstance { Ok(MemoryInstance {
limits: limits, limits: limits,
buffer: RefCell::new(Vec::with_capacity(4096)), buffer: RefCell::new(
ByteBuf::new(initial_size.0).map_err(|err| Error::Memory(err.to_string()))?,
),
initial: initial, initial: initial,
current_size: Cell::new(initial_size.0), current_size: Cell::new(initial_size.0),
maximum: maximum, maximum: maximum,
lowest_used: Cell::new(u32::max_value()), })
}
} }
/// Return linear memory limits. /// Return linear memory limits.
@ -163,16 +171,6 @@ impl MemoryInstance {
self.maximum self.maximum
} }
/// Returns lowest offset ever written or `u32::max_value()` if none.
pub fn lowest_used(&self) -> u32 {
self.lowest_used.get()
}
/// Resets tracked lowest offset.
pub fn reset_lowest_used(&self, addr: u32) {
self.lowest_used.set(addr)
}
/// Returns current linear memory size. /// Returns current linear memory size.
/// ///
/// Maximum memory size cannot exceed `65536` pages or 4GiB. /// Maximum memory size cannot exceed `65536` pages or 4GiB.
@ -193,13 +191,7 @@ impl MemoryInstance {
/// ); /// );
/// ``` /// ```
pub fn current_size(&self) -> Pages { pub fn current_size(&self) -> Pages {
Bytes(self.current_size.get()).round_up_to() Bytes(self.buffer.borrow().len()).round_up_to()
}
/// Returns current used memory size in bytes.
/// This is one more than the highest memory address that had been written to.
pub fn used_size(&self) -> Bytes {
Bytes(self.buffer.borrow().len())
} }
/// Get value from memory at given offset. /// Get value from memory at given offset.
@ -207,7 +199,10 @@ impl MemoryInstance {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.buffer.borrow_mut();
let region = let region =
self.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?; self.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?;
Ok(T::from_little_endian(&buffer[region.range()]).expect("Slice size is checked")) Ok(
T::from_little_endian(&buffer.as_slice_mut()[region.range()])
.expect("Slice size is checked"),
)
} }
/// Copy data from memory at given offset. /// Copy data from memory at given offset.
@ -220,7 +215,7 @@ impl MemoryInstance {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.buffer.borrow_mut();
let region = self.checked_region(&mut buffer, offset as usize, size)?; let region = self.checked_region(&mut buffer, offset as usize, size)?;
Ok(buffer[region.range()].to_vec()) Ok(buffer.as_slice_mut()[region.range()].to_vec())
} }
/// Copy data from given offset in the memory into `target` slice. /// Copy data from given offset in the memory into `target` slice.
@ -232,7 +227,7 @@ impl MemoryInstance {
let mut buffer = self.buffer.borrow_mut(); let mut buffer = self.buffer.borrow_mut();
let region = self.checked_region(&mut buffer, offset as usize, target.len())?; let region = self.checked_region(&mut buffer, offset as usize, target.len())?;
target.copy_from_slice(&buffer[region.range()]); target.copy_from_slice(&buffer.as_slice_mut()[region.range()]);
Ok(()) Ok(())
} }
@ -244,10 +239,7 @@ impl MemoryInstance {
.checked_region(&mut buffer, offset as usize, value.len())? .checked_region(&mut buffer, offset as usize, value.len())?
.range(); .range();
if offset < self.lowest_used.get() { buffer.as_slice_mut()[range].copy_from_slice(value);
self.lowest_used.set(offset);
}
buffer[range].copy_from_slice(value);
Ok(()) Ok(())
} }
@ -258,10 +250,7 @@ impl MemoryInstance {
let range = self let range = self
.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())? .checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?
.range(); .range();
if offset < self.lowest_used.get() { value.into_little_endian(&mut buffer.as_slice_mut()[range]);
self.lowest_used.set(offset);
}
value.into_little_endian(&mut buffer[range]);
Ok(()) Ok(())
} }
@ -295,19 +284,22 @@ impl MemoryInstance {
} }
let new_buffer_length: Bytes = new_size.into(); let new_buffer_length: Bytes = new_size.into();
self.buffer
.borrow_mut()
.realloc(new_buffer_length.0)
.map_err(|err| Error::Memory(err.to_string()))?;
self.current_size.set(new_buffer_length.0); self.current_size.set(new_buffer_length.0);
Ok(size_before_grow) Ok(size_before_grow)
} }
fn checked_region<B>( fn checked_region(
&self, &self,
buffer: &mut B, buffer: &mut ByteBuf,
offset: usize, offset: usize,
size: usize, size: usize,
) -> Result<CheckedRegion, Error> ) -> Result<CheckedRegion, Error> {
where
B: ::core::ops::DerefMut<Target = Vec<u8>>,
{
let end = offset.checked_add(size).ok_or_else(|| { let end = offset.checked_add(size).ok_or_else(|| {
Error::Memory(format!( Error::Memory(format!(
"trying to access memory block of size {} from offset {}", "trying to access memory block of size {} from offset {}",
@ -315,10 +307,6 @@ impl MemoryInstance {
)) ))
})?; })?;
if end <= self.current_size.get() && buffer.len() < end {
buffer.resize(end, 0);
}
if end > buffer.len() { if end > buffer.len() {
return Err(Error::Memory(format!( return Err(Error::Memory(format!(
"trying to access region [{}..{}] in memory [0..{}]", "trying to access region [{}..{}] in memory [0..{}]",
@ -334,17 +322,14 @@ impl MemoryInstance {
}) })
} }
fn checked_region_pair<B>( fn checked_region_pair(
&self, &self,
buffer: &mut B, buffer: &mut ByteBuf,
offset1: usize, offset1: usize,
size1: usize, size1: usize,
offset2: usize, offset2: usize,
size2: usize, size2: usize,
) -> Result<(CheckedRegion, CheckedRegion), Error> ) -> Result<(CheckedRegion, CheckedRegion), Error> {
where
B: ::core::ops::DerefMut<Target = Vec<u8>>,
{
let end1 = offset1.checked_add(size1).ok_or_else(|| { let end1 = offset1.checked_add(size1).ok_or_else(|| {
Error::Memory(format!( Error::Memory(format!(
"trying to access memory block of size {} from offset {}", "trying to access memory block of size {} from offset {}",
@ -359,11 +344,6 @@ impl MemoryInstance {
)) ))
})?; })?;
let max = cmp::max(end1, end2);
if max <= self.current_size.get() && buffer.len() < max {
buffer.resize(max, 0);
}
if end1 > buffer.len() { if end1 > buffer.len() {
return Err(Error::Memory(format!( return Err(Error::Memory(format!(
"trying to access region [{}..{}] in memory [0..{}]", "trying to access region [{}..{}] in memory [0..{}]",
@ -407,14 +387,10 @@ impl MemoryInstance {
let (read_region, write_region) = let (read_region, write_region) =
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?; self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
if dst_offset < self.lowest_used.get() as usize {
self.lowest_used.set(dst_offset as u32);
}
unsafe { unsafe {
::core::ptr::copy( ::core::ptr::copy(
buffer[read_region.range()].as_ptr(), buffer.as_slice()[read_region.range()].as_ptr(),
buffer[write_region.range()].as_mut_ptr(), buffer.as_slice_mut()[write_region.range()].as_mut_ptr(),
len, len,
) )
} }
@ -450,14 +426,10 @@ impl MemoryInstance {
))); )));
} }
if dst_offset < self.lowest_used.get() as usize {
self.lowest_used.set(dst_offset as u32);
}
unsafe { unsafe {
::core::ptr::copy_nonoverlapping( ::core::ptr::copy_nonoverlapping(
buffer[read_region.range()].as_ptr(), buffer.as_slice()[read_region.range()].as_ptr(),
buffer[write_region.range()].as_mut_ptr(), buffer.as_slice_mut()[write_region.range()].as_mut_ptr(),
len, len,
) )
} }
@ -493,11 +465,7 @@ impl MemoryInstance {
.checked_region(&mut dst_buffer, dst_offset, len)? .checked_region(&mut dst_buffer, dst_offset, len)?
.range(); .range();
if dst_offset < dst.lowest_used.get() as usize { dst_buffer.as_slice_mut()[dst_range].copy_from_slice(&src_buffer.as_slice()[src_range]);
dst.lowest_used.set(dst_offset as u32);
}
dst_buffer[dst_range].copy_from_slice(&src_buffer[src_range]);
Ok(()) Ok(())
} }
@ -514,11 +482,7 @@ impl MemoryInstance {
let range = self.checked_region(&mut buffer, offset, len)?.range(); let range = self.checked_region(&mut buffer, offset, len)?.range();
if offset < self.lowest_used.get() as usize { for val in &mut buffer.as_slice_mut()[range] {
self.lowest_used.set(offset as u32);
}
for val in &mut buffer[range] {
*val = new_val *val = new_val
} }
Ok(()) Ok(())
@ -533,18 +497,28 @@ impl MemoryInstance {
self.clear(offset, 0, len) self.clear(offset, 0, len)
} }
/// Set every byte in the entire linear memory to 0, preserving its size.
///
/// Might be useful for some optimization shenanigans.
pub fn erase(&self) -> Result<(), Error> {
self.buffer
.borrow_mut()
.erase()
.map_err(|err| Error::Memory(err.to_string()))
}
/// Provides direct access to the underlying memory buffer. /// Provides direct access to the underlying memory buffer.
/// ///
/// # Panics /// # Panics
/// ///
/// Any call that requires write access to memory (such as [`set`], [`clear`], etc) made within /// Any call that requires write access to memory (such as [`set`], [`clear`], etc) made within
/// the closure will panic. Note that the buffer size may be arbitraty. Proceed with caution. /// the closure will panic.
/// ///
/// [`set`]: #method.get /// [`set`]: #method.get
/// [`clear`]: #method.set /// [`clear`]: #method.set
pub fn with_direct_access<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R { pub fn with_direct_access<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
let buf = self.buffer.borrow(); let buf = self.buffer.borrow();
f(&*buf) f(buf.as_slice())
} }
/// Provides direct mutable access to the underlying memory buffer. /// Provides direct mutable access to the underlying memory buffer.
@ -552,15 +526,13 @@ impl MemoryInstance {
/// # Panics /// # Panics
/// ///
/// Any calls that requires either read or write access to memory (such as [`get`], [`set`], [`copy`], etc) made /// Any calls that requires either read or write access to memory (such as [`get`], [`set`], [`copy`], etc) made
/// within the closure will panic. Note that the buffer size may be arbitraty. /// within the closure will panic. Proceed with caution.
/// The closure may however resize it. Proceed with caution.
/// ///
/// [`get`]: #method.get /// [`get`]: #method.get
/// [`set`]: #method.set /// [`set`]: #method.set
/// [`copy`]: #method.copy pub fn with_direct_access_mut<R, F: FnOnce(&mut [u8]) -> R>(&self, f: F) -> R {
pub fn with_direct_access_mut<R, F: FnOnce(&mut Vec<u8>) -> R>(&self, f: F) -> R {
let mut buf = self.buffer.borrow_mut(); let mut buf = self.buffer.borrow_mut();
f(&mut buf) f(buf.as_slice_mut())
} }
} }
@ -574,29 +546,21 @@ mod tests {
#[test] #[test]
fn alloc() { fn alloc() {
#[cfg(target_pointer_width = "64")] let mut fixtures = vec![
let fixtures = &[
(0, None, true), (0, None, true),
(0, Some(0), true), (0, Some(0), true),
(1, None, true), (1, None, true),
(1, Some(1), true), (1, Some(1), true),
(0, Some(1), true), (0, Some(1), true),
(1, Some(0), false), (1, Some(0), false),
(0, Some(65536), true), ];
#[cfg(target_pointer_width = "64")]
fixtures.extend(&[
(65536, Some(65536), true), (65536, Some(65536), true),
(65536, Some(0), false), (65536, Some(0), false),
(65536, None, true), (65536, None, true),
]; ]);
#[cfg(target_pointer_width = "32")]
let fixtures = &[
(0, None, true),
(0, Some(0), true),
(1, None, true),
(1, Some(1), true),
(0, Some(1), true),
(1, Some(0), false),
];
for (index, &(initial, maybe_max, expected_ok)) in fixtures.iter().enumerate() { for (index, &(initial, maybe_max, expected_ok)) in fixtures.iter().enumerate() {
let initial: Pages = Pages(initial); let initial: Pages = Pages(initial);
@ -618,7 +582,7 @@ mod tests {
} }
fn create_memory(initial_content: &[u8]) -> MemoryInstance { fn create_memory(initial_content: &[u8]) -> MemoryInstance {
let mem = MemoryInstance::new(Pages(1), Some(Pages(1))); let mem = MemoryInstance::new(Pages(1), Some(Pages(1))).unwrap();
mem.set(0, initial_content) mem.set(0, initial_content)
.expect("Successful initialize the memory"); .expect("Successful initialize the memory");
mem mem
@ -731,7 +695,7 @@ mod tests {
#[test] #[test]
fn get_into() { fn get_into() {
let mem = MemoryInstance::new(Pages(1), None); let mem = MemoryInstance::new(Pages(1), None).unwrap();
mem.set(6, &[13, 17, 129]) mem.set(6, &[13, 17, 129])
.expect("memory set should not fail"); .expect("memory set should not fail");
@ -747,11 +711,19 @@ mod tests {
let mem = MemoryInstance::alloc(Pages(1), None).unwrap(); let mem = MemoryInstance::alloc(Pages(1), None).unwrap();
mem.set(100, &[0]).expect("memory set should not fail"); mem.set(100, &[0]).expect("memory set should not fail");
mem.with_direct_access_mut(|buf| { mem.with_direct_access_mut(|buf| {
assert_eq!(buf.len(), 101); assert_eq!(
buf.len(),
65536,
"the buffer length is expected to be 1 page long"
);
buf[..10].copy_from_slice(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); buf[..10].copy_from_slice(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
}); });
mem.with_direct_access(|buf| { mem.with_direct_access(|buf| {
assert_eq!(buf.len(), 101); assert_eq!(
buf.len(),
65536,
"the buffer length is expected to be 1 page long"
);
assert_eq!(&buf[..10], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); assert_eq!(&buf[..10], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
}); });
} }
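To make the closure-signature change concrete, a hypothetical usage sketch (assuming wasmi's public `MemoryInstance` API and its `memory_units` re-export): the closures now receive plain `&[u8]`/`&mut [u8]` slices over the eagerly allocated buffer, so a fresh one-page memory already exposes all 65536 bytes, as the updated test above asserts.

```rust
use wasmi::memory_units::Pages;
use wasmi::{Error, MemoryInstance};

fn scribble() -> Result<(), Error> {
    // One wasm page (64 KiB) is allocated up front.
    let memory = MemoryInstance::alloc(Pages(1), None)?;

    memory.with_direct_access_mut(|buf: &mut [u8]| {
        // The whole page is visible, not just the bytes written so far.
        assert_eq!(buf.len(), 65536);
        buf[0] = 42;
    });

    memory.with_direct_access(|buf: &[u8]| assert_eq!(buf[0], 42));

    // `erase` zeroes the whole buffer while keeping its size.
    memory.erase()?;
    Ok(())
}
```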

src/memory/vec_bytebuf.rs (new file, 39 lines)

@ -0,0 +1,39 @@
//! An implementation of `ByteBuf` based on a plain `Vec`.
use alloc::vec::Vec;
pub struct ByteBuf {
buf: Vec<u8>,
}
impl ByteBuf {
pub fn new(len: usize) -> Result<Self, &'static str> {
let mut buf = Vec::new();
buf.resize(len, 0u8);
Ok(Self { buf })
}
pub fn realloc(&mut self, new_len: usize) -> Result<(), &'static str> {
self.buf.resize(new_len, 0u8);
Ok(())
}
pub fn len(&self) -> usize {
self.buf.len()
}
pub fn as_slice(&self) -> &[u8] {
self.buf.as_ref()
}
pub fn as_slice_mut(&mut self) -> &mut [u8] {
self.buf.as_mut()
}
pub fn erase(&mut self) -> Result<(), &'static str> {
for v in &mut self.buf {
*v = 0;
}
Ok(())
}
}


@ -1,6 +1,9 @@
#[allow(unused_imports)] use alloc::{
use alloc::prelude::v1::*; borrow::ToOwned,
use alloc::rc::Rc; rc::Rc,
string::{String, ToString},
vec::Vec,
};
use core::cell::RefCell; use core::cell::RefCell;
use core::fmt; use core::fmt;
use Trap; use Trap;
@ -37,13 +40,6 @@ use {Error, MemoryInstance, Module, RuntimeValue, Signature, TableInstance};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct ModuleRef(pub(crate) Rc<ModuleInstance>); pub struct ModuleRef(pub(crate) Rc<ModuleInstance>);
impl ModuleRef {
/// Creates a new `ModuleRef` from a `ModuleInstance`.
pub fn new(instance: ModuleInstance) -> ModuleRef {
ModuleRef(Rc::new(instance))
}
}
impl ::core::ops::Deref for ModuleRef { impl ::core::ops::Deref for ModuleRef {
type Target = ModuleInstance; type Target = ModuleInstance;
fn deref(&self) -> &ModuleInstance { fn deref(&self) -> &ModuleInstance {
@ -169,31 +165,6 @@ pub struct ModuleInstance {
exports: RefCell<BTreeMap<String, ExternVal>>, exports: RefCell<BTreeMap<String, ExternVal>>,
} }
impl Clone for ModuleInstance {
fn clone(&self) -> Self {
let signatures = self.signatures.borrow();
let vec = &(*signatures);
let signatures = vec
.iter()
.map(|inner_rc| {
let signature = &**inner_rc;
let signature_clone = signature.clone();
Rc::new(signature_clone)
})
.collect();
let signatures = RefCell::new(signatures);
ModuleInstance {
signatures,
tables: self.tables.clone(),
funcs: self.funcs.clone(),
memories: self.memories.clone(),
globals: self.globals.clone(),
exports: self.exports.clone(),
}
}
}
impl ModuleInstance { impl ModuleInstance {
fn default() -> Self { fn default() -> Self {
ModuleInstance { ModuleInstance {
@ -450,7 +421,11 @@ impl ModuleInstance {
.map(|es| es.entries()) .map(|es| es.entries())
.unwrap_or(&[]) .unwrap_or(&[])
{ {
let offset_val = match eval_init_expr(element_segment.offset(), &module_ref) { let offset = element_segment
.offset()
.as_ref()
.expect("passive segments are rejected due to validation");
let offset_val = match eval_init_expr(offset, &module_ref) {
RuntimeValue::I32(v) => v as u32, RuntimeValue::I32(v) => v as u32,
_ => panic!("Due to validation elem segment offset should evaluate to i32"), _ => panic!("Due to validation elem segment offset should evaluate to i32"),
}; };
@ -479,7 +454,11 @@ impl ModuleInstance {
} }
for data_segment in module.data_section().map(|ds| ds.entries()).unwrap_or(&[]) { for data_segment in module.data_section().map(|ds| ds.entries()).unwrap_or(&[]) {
let offset_val = match eval_init_expr(data_segment.offset(), &module_ref) { let offset = data_segment
.offset()
.as_ref()
.expect("passive segments are rejected due to validation");
let offset_val = match eval_init_expr(offset, &module_ref) {
RuntimeValue::I32(v) => v as u32, RuntimeValue::I32(v) => v as u32,
_ => panic!("Due to validation data segment offset should evaluate to i32"), _ => panic!("Due to validation data segment offset should evaluate to i32"),
}; };


@ -1,5 +1,4 @@
#[allow(unused_imports)] use alloc::{string::String, vec::Vec};
use alloc::prelude::v1::*;
use parity_wasm::elements::{BlockType, FuncBody, Instruction}; use parity_wasm::elements::{BlockType, FuncBody, Instruction};
@ -252,13 +251,14 @@ impl Compiler {
); );
self.sink.emit_br_nez(target); self.sink.emit_br_nez(target);
} }
BrTable(ref table, default) => { BrTable(ref br_table_data) => {
// At this point, the condition value is at the top of the stack. // At this point, the condition value is at the top of the stack.
// But at the point of actual jump the condition will already be // But at the point of actual jump the condition will already be
// popped off. // popped off.
let value_stack_height = context.value_stack.len().saturating_sub(1); let value_stack_height = context.value_stack.len().saturating_sub(1);
let targets = table let targets = br_table_data
.table
.iter() .iter()
.map(|depth| { .map(|depth| {
require_target( require_target(
@ -270,7 +270,7 @@ impl Compiler {
}) })
.collect::<Result<Vec<_>, _>>(); .collect::<Result<Vec<_>, _>>();
let default_target = require_target( let default_target = require_target(
default, br_table_data.default,
value_stack_height, value_stack_height,
&context.frame_stack, &context.frame_stack,
&self.label_stack, &self.label_stack,


@ -1,5 +1,4 @@
#[allow(unused_imports)] use alloc::vec::Vec;
use alloc::prelude::v1::*;
use crate::{ use crate::{
isa, isa,


@ -1,5 +1,4 @@
#[allow(unused_imports)] use alloc::{boxed::Box, vec::Vec};
use alloc::prelude::v1::*;
use core::fmt; use core::fmt;
use core::ops; use core::ops;
use core::{u32, usize}; use core::{u32, usize};
@ -1290,14 +1289,8 @@ impl FunctionContext {
debug_assert!(!self.is_initialized); debug_assert!(!self.is_initialized);
let num_locals = locals.iter().map(|l| l.count() as usize).sum(); let num_locals = locals.iter().map(|l| l.count() as usize).sum();
let locals = vec![Default::default(); num_locals];
// TODO: Replace with extend. value_stack.extend(num_locals)?;
for local in locals {
value_stack
.push(local)
.map_err(|_| TrapKind::StackOverflow)?;
}
self.is_initialized = true; self.is_initialized = true;
Ok(()) Ok(())
@ -1443,6 +1436,18 @@ impl ValueStack {
Ok(()) Ok(())
} }
fn extend(&mut self, len: usize) -> Result<(), TrapKind> {
let cells = self
.buf
.get_mut(self.sp..self.sp + len)
.ok_or_else(|| TrapKind::StackOverflow)?;
for cell in cells {
*cell = Default::default();
}
self.sp += len;
Ok(())
}
#[inline] #[inline]
fn len(&self) -> usize { fn len(&self) -> usize {
self.sp self.sp


@ -1,6 +1,4 @@
#[allow(unused_imports)] use alloc::{rc::Rc, vec::Vec};
use alloc::prelude::v1::*;
use alloc::rc::Rc;
use core::cell::RefCell; use core::cell::RefCell;
use core::fmt; use core::fmt;
use core::u32; use core::u32;


@ -285,7 +285,7 @@ fn resume_call_host_func() {
let export = instance.export_by_name("test").unwrap(); let export = instance.export_by_name("test").unwrap();
let func_instance = export.as_func().unwrap(); let func_instance = export.as_func().unwrap();
let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[]).unwrap(); let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[][..]).unwrap();
let result = invocation.start_execution(&mut env); let result = invocation.start_execution(&mut env);
match result { match result {
Err(ResumableError::Trap(_)) => {} Err(ResumableError::Trap(_)) => {}
@ -330,7 +330,7 @@ fn resume_call_host_func_type_mismatch() {
let export = instance.export_by_name("test").unwrap(); let export = instance.export_by_name("test").unwrap();
let func_instance = export.as_func().unwrap(); let func_instance = export.as_func().unwrap();
let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[]).unwrap(); let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[][..]).unwrap();
let result = invocation.start_execution(&mut env); let result = invocation.start_execution(&mut env);
match result { match result {
Err(ResumableError::Trap(_)) => {} Err(ResumableError::Trap(_)) => {}


@ -368,6 +368,7 @@ impl WrapInto<F32> for F64 {
macro_rules! impl_try_truncate_into { macro_rules! impl_try_truncate_into {
(@primitive $from: ident, $into: ident, $to_primitive:path) => { (@primitive $from: ident, $into: ident, $to_primitive:path) => {
impl TryTruncateInto<$into, TrapKind> for $from { impl TryTruncateInto<$into, TrapKind> for $from {
#[cfg(feature = "std")]
fn try_truncate_into(self) -> Result<$into, TrapKind> { fn try_truncate_into(self) -> Result<$into, TrapKind> {
// Casting from a float to an integer will round the float towards zero // Casting from a float to an integer will round the float towards zero
num_rational::BigRational::from_float(self) num_rational::BigRational::from_float(self)
@ -375,6 +376,23 @@ macro_rules! impl_try_truncate_into {
.and_then(|val| $to_primitive(&val)) .and_then(|val| $to_primitive(&val))
.ok_or(TrapKind::InvalidConversionToInt) .ok_or(TrapKind::InvalidConversionToInt)
} }
#[cfg(not(feature = "std"))]
fn try_truncate_into(self) -> Result<$into, TrapKind> {
// Casting from a float to an integer will round the float towards zero
// NOTE: currently this will cause Undefined Behavior if the rounded value cannot be represented by the
// target integer type. This includes Inf and NaN. This is a bug and will be fixed.
if self.is_nan() || self.is_infinite() {
return Err(TrapKind::InvalidConversionToInt);
}
// range check
let result = self as $into;
if result as $from != self.trunc() {
return Err(TrapKind::InvalidConversionToInt);
}
Ok(self as $into)
}
} }
}; };
(@wrapped $from:ident, $intermediate:ident, $into:ident) => { (@wrapped $from:ident, $intermediate:ident, $into:ident) => {
@ -819,15 +837,6 @@ impl_integer!(u32);
impl_integer!(i64); impl_integer!(i64);
impl_integer!(u64); impl_integer!(u64);
// Use std float functions in std environment.
// And libm's implementation in no_std
#[cfg(feature = "std")]
macro_rules! call_math {
($op:ident, $e:expr, $fXX:ident, $FXXExt:ident) => {
$fXX::$op($e)
};
}
#[cfg(not(feature = "std"))]
macro_rules! call_math { macro_rules! call_math {
($op:ident, $e:expr, $fXX:ident, $FXXExt:ident) => { ($op:ident, $e:expr, $fXX:ident, $FXXExt:ident) => {
::libm::$FXXExt::$op($e) ::libm::$FXXExt::$op($e)


@ -18,6 +18,7 @@ fn spec_to_runtime_value(val: Value<u32, u64>) -> RuntimeValue {
Value::I64(v) => RuntimeValue::I64(v), Value::I64(v) => RuntimeValue::I64(v),
Value::F32(v) => RuntimeValue::F32(v.into()), Value::F32(v) => RuntimeValue::F32(v.into()),
Value::F64(v) => RuntimeValue::F64(v.into()), Value::F64(v) => RuntimeValue::F64(v.into()),
Value::V128(_) => panic!("v128 is not supported"),
} }
} }


@ -1,6 +1,6 @@
[package] [package]
name = "wasmi-validation" name = "wasmi-validation"
version = "0.1.0" version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"] authors = ["Parity Technologies <admin@parity.io>"]
edition = "2018" edition = "2018"
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
@ -8,7 +8,7 @@ repository = "https://github.com/paritytech/wasmi"
description = "Wasm code validator" description = "Wasm code validator"
[dependencies] [dependencies]
parity-wasm = { version = "0.31", default-features = false } parity-wasm = { version = "0.40.1", default-features = false }
[dev-dependencies] [dev-dependencies]
assert_matches = "1.1" assert_matches = "1.1"


@ -1,6 +1,5 @@
use crate::Error; use crate::Error;
#[allow(unused_imports)] use alloc::vec::Vec;
use alloc::prelude::v1::*;
use parity_wasm::elements::{ use parity_wasm::elements::{
BlockType, FunctionType, GlobalType, MemoryType, TableType, ValueType, BlockType, FunctionType, GlobalType, MemoryType, TableType, ValueType,
}; };


@ -1,6 +1,3 @@
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use crate::{ use crate::{
context::ModuleContext, stack::StackWithLimit, util::Locals, Error, FuncValidator, context::ModuleContext, stack::StackWithLimit, util::Locals, Error, FuncValidator,
DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX, DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX,
@ -269,8 +266,8 @@ impl<'a> FunctionValidationContext<'a> {
BrIf(depth) => { BrIf(depth) => {
self.validate_br_if(depth)?; self.validate_br_if(depth)?;
} }
BrTable(ref table, default) => { BrTable(ref br_table_data) => {
self.validate_br_table(table, default)?; self.validate_br_table(&*br_table_data.table, br_table_data.default)?;
make_top_frame_polymorphic(&mut self.value_stack, &mut self.frame_stack); make_top_frame_polymorphic(&mut self.value_stack, &mut self.frame_stack);
} }
Return => { Return => {


@ -2,8 +2,6 @@
// #![warn(missing_docs)] // #![warn(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), no_std)]
//// alloc is required in no_std
#![cfg_attr(not(feature = "std"), feature(alloc, alloc_prelude))]
#[cfg(not(feature = "std"))] #[cfg(not(feature = "std"))]
#[macro_use] #[macro_use]
@ -21,8 +19,7 @@ pub const DEFAULT_TABLE_INDEX: u32 = 0;
/// Maximal number of pages that a wasm instance supports. /// Maximal number of pages that a wasm instance supports.
pub const LINEAR_MEMORY_MAX_PAGES: u32 = 65536; pub const LINEAR_MEMORY_MAX_PAGES: u32 = 65536;
#[allow(unused_imports)] use alloc::{string::String, vec::Vec};
use alloc::prelude::v1::*;
use core::fmt; use core::fmt;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::error; use std::error;
@ -322,7 +319,11 @@ pub fn validate_module<V: Validator>(module: &Module) -> Result<V::Output, Error
if let Some(data_section) = module.data_section() { if let Some(data_section) = module.data_section() {
for data_segment in data_section.entries() { for data_segment in data_section.entries() {
context.require_memory(data_segment.index())?; context.require_memory(data_segment.index())?;
let init_ty = expr_const_type(data_segment.offset(), context.globals())?; let offset = data_segment
.offset()
.as_ref()
.ok_or_else(|| Error("passive memory segments are not supported".into()))?;
let init_ty = expr_const_type(&offset, context.globals())?;
if init_ty != ValueType::I32 { if init_ty != ValueType::I32 {
return Err(Error("segment offset should return I32".into())); return Err(Error("segment offset should return I32".into()));
} }
@ -333,8 +334,11 @@ pub fn validate_module<V: Validator>(module: &Module) -> Result<V::Output, Error
if let Some(element_section) = module.elements_section() { if let Some(element_section) = module.elements_section() {
for element_segment in element_section.entries() { for element_segment in element_section.entries() {
context.require_table(element_segment.index())?; context.require_table(element_segment.index())?;
let offset = element_segment
let init_ty = expr_const_type(element_segment.offset(), context.globals())?; .offset()
.as_ref()
.ok_or_else(|| Error("passive element segments are not supported".into()))?;
let init_ty = expr_const_type(&offset, context.globals())?;
if init_ty != ValueType::I32 { if init_ty != ValueType::I32 {
return Err(Error("segment offset should return I32".into())); return Err(Error("segment offset should return I32".into()));
} }


@ -1,5 +1,4 @@
#[allow(unused_imports)] use alloc::{string::String, vec::Vec};
use alloc::prelude::v1::*;
use core::fmt; use core::fmt;
#[cfg(feature = "std")] #[cfg(feature = "std")]


@ -1,6 +1,5 @@
use crate::Error; use crate::Error;
#[allow(unused_imports)] use alloc::string::String;
use alloc::prelude::v1::*;
use parity_wasm::elements::{Local, ValueType}; use parity_wasm::elements::{Local, ValueType};
#[cfg(test)] #[cfg(test)]