Compare commits
9 Commits
master
...
cmichi-add
Author | SHA1 | Date |
---|---|---|
Michael Mueller | c96915b4df | |
Michael Mueller | b1bd7950d9 | |
Michael Mueller | 70a2e612bc | |
Michael Mueller | 81f34a6ab6 | |
Michael Mueller | 91684c25ba | |
Michael Mueller | d08a08de51 | |
Michael Mueller | f111950cbd | |
Michael Mueller | 99b0e03e4e | |
Michael Mueller | 7546d3026d |
|
@ -3,4 +3,3 @@
|
|||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
spec/target
|
||||
.idea
|
||||
|
|
14
.travis.yml
14
.travis.yml
|
@ -26,8 +26,6 @@ script:
|
|||
- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo check --benches --manifest-path=benches/Cargo.toml; fi
|
||||
# Make sure `no_std` version checks.
|
||||
- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo +nightly check --no-default-features --features core; fi
|
||||
# Check that `vec_memory` feature works.
|
||||
- cargo check --features vec_memory
|
||||
- travis_wait 60 ./test.sh
|
||||
- ./doc.sh
|
||||
|
||||
|
@ -40,17 +38,7 @@ after_success: |
|
|||
ghp-import -n target/doc &&
|
||||
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
|
||||
|
||||
cache:
|
||||
# Don't use `cache: cargo` since it adds the `target` directory and that can be huge.
|
||||
# Saving and loading this directory dwarfes actual compilation and test times. But what is more
|
||||
# important, is that travis timeouts the build since the job doesn't produce any output for more
|
||||
# than 10 minutes.
|
||||
#
|
||||
# So we just cache ~/.cargo directory
|
||||
directories:
|
||||
- /home/travis/.cargo
|
||||
cache: cargo
|
||||
before_cache:
|
||||
# Travis can't cache files that are not readable by "others"
|
||||
- chmod -R a+r $HOME/.cargo
|
||||
# According to the Travis CI docs for building Rust project this is done by,
|
||||
- rm -rf /home/travis/.cargo/registry
|
||||
|
|
23
Cargo.toml
23
Cargo.toml
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "wasmi"
|
||||
version = "0.5.1"
|
||||
version = "0.4.6"
|
||||
authors = ["Nikolay Volf <nikvolf@gmail.com>", "Svyatoslav Nikolsky <svyatonik@yandex.ru>", "Sergey Pepyakin <s.pepyakin@gmail.com>"]
|
||||
license = "MIT/Apache-2.0"
|
||||
readme = "README.md"
|
||||
|
@ -11,41 +11,34 @@ keywords = ["wasm", "webassembly", "bytecode", "interpreter"]
|
|||
exclude = [ "/res/*", "/tests/*", "/fuzz/*", "/benches/*" ]
|
||||
|
||||
[dependencies]
|
||||
wasmi-validation = { version = "0.2", path = "validation", default-features = false }
|
||||
parity-wasm = { version = "0.40.1", default-features = false }
|
||||
wasmi-validation = { version = "0.1", path = "validation", default-features = false }
|
||||
parity-wasm = { version = "0.31", default-features = false }
|
||||
memory_units = "0.3.0"
|
||||
libm = { version = "0.1.2", optional = true }
|
||||
num-rational = { version = "0.2.2", default-features = false }
|
||||
num-traits = { version = "0.2.8", default-features = false }
|
||||
num-rational = "0.2.2"
|
||||
num-traits = "0.2.8"
|
||||
atomic = { version = "0.4", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_matches = "1.1"
|
||||
rand = "0.4.2"
|
||||
wabt = "0.9"
|
||||
wabt = "0.6"
|
||||
|
||||
[features]
|
||||
threadsafe = ["atomic"]
|
||||
default = ["std"]
|
||||
# Disable for no_std support
|
||||
std = [
|
||||
"parity-wasm/std",
|
||||
"wasmi-validation/std",
|
||||
"num-rational/std",
|
||||
"num-rational/bigint-std",
|
||||
"num-traits/std"
|
||||
]
|
||||
# Enable for no_std support
|
||||
core = [
|
||||
# `core` doesn't support vec_memory
|
||||
"vec_memory",
|
||||
"wasmi-validation/core",
|
||||
"libm"
|
||||
]
|
||||
# Enforce using the linear memory implementation based on `Vec` instead of
|
||||
# mmap on unix systems.
|
||||
#
|
||||
# Useful for tests and if you need to minimize unsafe usage at the cost of performance on some
|
||||
# workloads.
|
||||
vec_memory = []
|
||||
|
||||
[workspace]
|
||||
members = ["validation"]
|
||||
|
|
18
README.md
18
README.md
|
@ -18,6 +18,7 @@ git clone https://github.com/paritytech/wasmi.git --recursive
|
|||
cd wasmi
|
||||
cargo build
|
||||
cargo test
|
||||
cargo test --features threadsafe
|
||||
```
|
||||
|
||||
# `no_std` support
|
||||
|
@ -26,8 +27,8 @@ This crate supports `no_std` environments.
|
|||
Enable the `core` feature and disable default features:
|
||||
```toml
|
||||
[dependencies]
|
||||
wasmi = {
|
||||
version = "*",
|
||||
parity-wasm = {
|
||||
version = "0.31",
|
||||
default-features = false,
|
||||
features = "core"
|
||||
}
|
||||
|
@ -39,6 +40,19 @@ Also, code related to `std::error` is disabled.
|
|||
Floating point operations in `no_std` use [`libm`](https://crates.io/crates/libm), which sometimes panics in debug mode (https://github.com/japaric/libm/issues/4).
|
||||
So make sure to either use release builds or avoid WASM with floating point operations, for example by using [`deny_floating_point`](https://docs.rs/wasmi/0.4.0/wasmi/struct.Module.html#method.deny_floating_point).
|
||||
|
||||
# Thread-safe support
|
||||
|
||||
This crate supports thread-safe environments.
|
||||
Enable the `threadsafe` feature and Rust's thread-safe data structures will be used.
|
||||
```toml
|
||||
[dependencies]
|
||||
parity-wasm = {
|
||||
version = "0.31",
|
||||
default-features = true,
|
||||
features = "threadsafe"
|
||||
}
|
||||
```
|
||||
|
||||
# License
|
||||
|
||||
`wasmi` is primarily distributed under the terms of both the MIT
|
||||
|
|
|
@ -6,7 +6,7 @@ authors = ["Sergey Pepyakin <s.pepyakin@gmail.com>"]
|
|||
[dependencies]
|
||||
wasmi = { path = ".." }
|
||||
assert_matches = "1.2"
|
||||
wabt = "0.9"
|
||||
wabt = "0.6"
|
||||
|
||||
[profile.bench]
|
||||
debug = true
|
||||
|
|
|
@ -13,7 +13,7 @@ use wasmi::{ImportsBuilder, Module, ModuleInstance, NopExternals, RuntimeValue};
|
|||
use test::Bencher;
|
||||
|
||||
// Load a module from a file.
|
||||
fn load_from_file(filename: &str) -> Result<Module, Box<dyn error::Error>> {
|
||||
fn load_from_file(filename: &str) -> Result<Module, Box<error::Error>> {
|
||||
use std::io::prelude::*;
|
||||
let mut file = File::open(filename)?;
|
||||
let mut buf = Vec::new();
|
||||
|
|
|
@ -33,7 +33,7 @@ pub extern "C" fn prepare_tiny_keccak() -> *const TinyKeccakTestData {
|
|||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn bench_tiny_keccak(test_data: *mut TinyKeccakTestData) {
|
||||
pub extern "C" fn bench_tiny_keccak(test_data: *const TinyKeccakTestData) {
|
||||
unsafe {
|
||||
let mut keccak = Keccak::new_keccak256();
|
||||
keccak.update((*test_data).data);
|
||||
|
|
|
@ -10,7 +10,7 @@ cargo-fuzz = true
|
|||
|
||||
[dependencies]
|
||||
wasmi = { path = ".." }
|
||||
wabt = "0.9"
|
||||
wabt = "0.6.0"
|
||||
wasmparser = "0.14.1"
|
||||
tempdir = "0.3.6"
|
||||
|
||||
|
|
|
@ -7,4 +7,4 @@ authors = ["Sergey Pepyakin <s.pepyakin@gmail.com>"]
|
|||
honggfuzz = "=0.5.9" # Strict equal since hfuzz requires dep and cmd versions to match.
|
||||
wasmi = { path = ".." }
|
||||
tempdir = "0.3.6"
|
||||
wabt = "0.9"
|
||||
wabt = "0.6.0"
|
||||
|
|
38
src/func.rs
38
src/func.rs
|
@ -1,8 +1,5 @@
|
|||
use alloc::{
|
||||
borrow::Cow,
|
||||
rc::{Rc, Weak},
|
||||
vec::Vec,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
use core::fmt;
|
||||
use host::Externals;
|
||||
use isa;
|
||||
|
@ -19,7 +16,7 @@ use {Signature, Trap};
|
|||
///
|
||||
/// [`FuncInstance`]: struct.FuncInstance.html
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FuncRef(Rc<FuncInstance>);
|
||||
pub struct FuncRef(::MyRc<FuncInstance>);
|
||||
|
||||
impl ::core::ops::Deref for FuncRef {
|
||||
type Target = FuncInstance;
|
||||
|
@ -47,9 +44,9 @@ pub struct FuncInstance(FuncInstanceInternal);
|
|||
#[derive(Clone)]
|
||||
pub(crate) enum FuncInstanceInternal {
|
||||
Internal {
|
||||
signature: Rc<Signature>,
|
||||
module: Weak<ModuleInstance>,
|
||||
body: Rc<FuncBody>,
|
||||
signature: ::MyRc<Signature>,
|
||||
module: ::MyWeak<ModuleInstance>,
|
||||
body: ::MyRc<FuncBody>,
|
||||
},
|
||||
Host {
|
||||
signature: Signature,
|
||||
|
@ -86,7 +83,7 @@ impl FuncInstance {
|
|||
signature,
|
||||
host_func_index,
|
||||
};
|
||||
FuncRef(Rc::new(FuncInstance(func)))
|
||||
FuncRef(::MyRc::new(FuncInstance(func)))
|
||||
}
|
||||
|
||||
/// Returns [signature] of this function instance.
|
||||
|
@ -106,21 +103,21 @@ impl FuncInstance {
|
|||
}
|
||||
|
||||
pub(crate) fn alloc_internal(
|
||||
module: Weak<ModuleInstance>,
|
||||
signature: Rc<Signature>,
|
||||
module: ::MyWeak<ModuleInstance>,
|
||||
signature: ::MyRc<Signature>,
|
||||
body: FuncBody,
|
||||
) -> FuncRef {
|
||||
let func = FuncInstanceInternal::Internal {
|
||||
signature,
|
||||
module: module,
|
||||
body: Rc::new(body),
|
||||
body: ::MyRc::new(body),
|
||||
};
|
||||
FuncRef(Rc::new(FuncInstance(func)))
|
||||
FuncRef(::MyRc::new(FuncInstance(func)))
|
||||
}
|
||||
|
||||
pub(crate) fn body(&self) -> Option<Rc<FuncBody>> {
|
||||
pub(crate) fn body(&self) -> Option<::MyRc<FuncBody>> {
|
||||
match *self.as_internal() {
|
||||
FuncInstanceInternal::Internal { ref body, .. } => Some(Rc::clone(body)),
|
||||
FuncInstanceInternal::Internal { ref body, .. } => Some(::MyRc::clone(body)),
|
||||
FuncInstanceInternal::Host { .. } => None,
|
||||
}
|
||||
}
|
||||
|
@ -196,13 +193,12 @@ impl FuncInstance {
|
|||
/// [`resume_execution`]: struct.FuncInvocation.html#method.resume_execution
|
||||
pub fn invoke_resumable<'args>(
|
||||
func: &FuncRef,
|
||||
args: impl Into<Cow<'args, [RuntimeValue]>>,
|
||||
args: &'args [RuntimeValue],
|
||||
) -> Result<FuncInvocation<'args>, Trap> {
|
||||
let args = args.into();
|
||||
check_function_args(func.signature(), &args)?;
|
||||
match *func.as_internal() {
|
||||
FuncInstanceInternal::Internal { .. } => {
|
||||
let interpreter = Interpreter::new(func, &*args, None)?;
|
||||
let interpreter = Interpreter::new(func, args, None)?;
|
||||
Ok(FuncInvocation {
|
||||
kind: FuncInvocationKind::Internal(interpreter),
|
||||
})
|
||||
|
@ -259,7 +255,7 @@ pub struct FuncInvocation<'args> {
|
|||
enum FuncInvocationKind<'args> {
|
||||
Internal(Interpreter),
|
||||
Host {
|
||||
args: Cow<'args, [RuntimeValue]>,
|
||||
args: &'args [RuntimeValue],
|
||||
host_func_index: usize,
|
||||
finished: bool,
|
||||
},
|
||||
|
@ -306,7 +302,7 @@ impl<'args> FuncInvocation<'args> {
|
|||
return Err(ResumableError::AlreadyStarted);
|
||||
}
|
||||
*finished = true;
|
||||
Ok(externals.invoke_index(*host_func_index, args.as_ref().into())?)
|
||||
Ok(externals.invoke_index(*host_func_index, args.clone().into())?)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
use alloc::rc::Rc;
|
||||
use core::cell::Cell;
|
||||
use parity_wasm::elements::ValueType as EValueType;
|
||||
use types::ValueType;
|
||||
use value::RuntimeValue;
|
||||
|
@ -11,7 +9,7 @@ use Error;
|
|||
///
|
||||
/// [`GlobalInstance`]: struct.GlobalInstance.html
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct GlobalRef(Rc<GlobalInstance>);
|
||||
pub struct GlobalRef(::MyRc<GlobalInstance>);
|
||||
|
||||
impl ::core::ops::Deref for GlobalRef {
|
||||
type Target = GlobalInstance;
|
||||
|
@ -33,7 +31,7 @@ impl ::core::ops::Deref for GlobalRef {
|
|||
/// [`I64`]: enum.RuntimeValue.html#variant.I64
|
||||
#[derive(Debug)]
|
||||
pub struct GlobalInstance {
|
||||
val: Cell<RuntimeValue>,
|
||||
val: ::MyCell<RuntimeValue>,
|
||||
mutable: bool,
|
||||
}
|
||||
|
||||
|
@ -43,8 +41,8 @@ impl GlobalInstance {
|
|||
/// Since it is possible to export only immutable globals,
|
||||
/// users likely want to set `mutable` to `false`.
|
||||
pub fn alloc(val: RuntimeValue, mutable: bool) -> GlobalRef {
|
||||
GlobalRef(Rc::new(GlobalInstance {
|
||||
val: Cell::new(val),
|
||||
GlobalRef(::MyRc::new(GlobalInstance {
|
||||
val: ::MyCell::new(val),
|
||||
mutable,
|
||||
}))
|
||||
}
|
||||
|
|
|
@ -114,11 +114,11 @@ pub trait HostError: 'static + ::core::fmt::Display + ::core::fmt::Debug + Send
|
|||
}
|
||||
}
|
||||
|
||||
impl dyn HostError {
|
||||
impl HostError {
|
||||
/// Attempt to downcast this `HostError` to a concrete type by reference.
|
||||
pub fn downcast_ref<T: HostError>(&self) -> Option<&T> {
|
||||
if self.__private_get_type_id__() == TypeId::of::<T>() {
|
||||
unsafe { Some(&*(self as *const dyn HostError as *const T)) }
|
||||
unsafe { Some(&*(self as *const HostError as *const T)) }
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -128,7 +128,7 @@ impl dyn HostError {
|
|||
/// reference.
|
||||
pub fn downcast_mut<T: HostError>(&mut self) -> Option<&mut T> {
|
||||
if self.__private_get_type_id__() == TypeId::of::<T>() {
|
||||
unsafe { Some(&mut *(self as *mut dyn HostError as *mut T)) }
|
||||
unsafe { Some(&mut *(self as *mut HostError as *mut T)) }
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -257,5 +257,5 @@ mod tests {
|
|||
}
|
||||
|
||||
// Tests that `HostError` trait is object safe.
|
||||
fn _host_error_is_object_safe(_: &dyn HostError) {}
|
||||
fn _host_error_is_object_safe(_: &HostError) {}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
use alloc::{collections::BTreeMap, string::String};
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
|
||||
use alloc::collections::BTreeMap;
|
||||
|
||||
use func::FuncRef;
|
||||
use global::GlobalRef;
|
||||
|
@ -100,7 +103,7 @@ pub trait ImportResolver {
|
|||
/// [`ImportResolver`]: trait.ImportResolver.html
|
||||
/// [`ModuleImportResolver`]: trait.ModuleImportResolver.html
|
||||
pub struct ImportsBuilder<'a> {
|
||||
modules: BTreeMap<String, &'a dyn ModuleImportResolver>,
|
||||
modules: BTreeMap<String, &'a ModuleImportResolver>,
|
||||
}
|
||||
|
||||
impl<'a> Default for ImportsBuilder<'a> {
|
||||
|
@ -121,7 +124,7 @@ impl<'a> ImportsBuilder<'a> {
|
|||
pub fn with_resolver<N: Into<String>>(
|
||||
mut self,
|
||||
name: N,
|
||||
resolver: &'a dyn ModuleImportResolver,
|
||||
resolver: &'a ModuleImportResolver,
|
||||
) -> Self {
|
||||
self.modules.insert(name.into(), resolver);
|
||||
self
|
||||
|
@ -130,15 +133,11 @@ impl<'a> ImportsBuilder<'a> {
|
|||
/// Register an resolver by a name.
|
||||
///
|
||||
/// Mutable borrowed version.
|
||||
pub fn push_resolver<N: Into<String>>(
|
||||
&mut self,
|
||||
name: N,
|
||||
resolver: &'a dyn ModuleImportResolver,
|
||||
) {
|
||||
pub fn push_resolver<N: Into<String>>(&mut self, name: N, resolver: &'a ModuleImportResolver) {
|
||||
self.modules.insert(name.into(), resolver);
|
||||
}
|
||||
|
||||
fn resolver(&self, name: &str) -> Option<&dyn ModuleImportResolver> {
|
||||
fn resolver(&self, name: &str) -> Option<&ModuleImportResolver> {
|
||||
self.modules.get(name).cloned()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -67,7 +67,8 @@
|
|||
//! - Reserved immediates are ignored for `call_indirect`, `current_memory`, `grow_memory`.
|
||||
//!
|
||||
|
||||
use alloc::vec::Vec;
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
|
||||
/// Should we keep a value before "discarding" a stack frame?
|
||||
///
|
||||
|
|
27
src/lib.rs
27
src/lib.rs
|
@ -96,6 +96,8 @@
|
|||
|
||||
#![warn(missing_docs)]
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
//// alloc is required in no_std
|
||||
#![cfg_attr(not(feature = "std"), feature(alloc, alloc_prelude))]
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
#[macro_use]
|
||||
|
@ -117,11 +119,8 @@ extern crate parity_wasm;
|
|||
|
||||
extern crate wasmi_validation as validation;
|
||||
|
||||
use alloc::{
|
||||
boxed::Box,
|
||||
string::{String, ToString},
|
||||
vec::Vec,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
use core::fmt;
|
||||
#[cfg(feature = "std")]
|
||||
use std::error;
|
||||
|
@ -240,7 +239,7 @@ pub enum TrapKind {
|
|||
/// Typically returned from an implementation of [`Externals`].
|
||||
///
|
||||
/// [`Externals`]: trait.Externals.html
|
||||
Host(Box<dyn host::HostError>),
|
||||
Host(Box<host::HostError>),
|
||||
}
|
||||
|
||||
impl TrapKind {
|
||||
|
@ -274,7 +273,7 @@ pub enum Error {
|
|||
/// Trap.
|
||||
Trap(Trap),
|
||||
/// Custom embedder error.
|
||||
Host(Box<dyn host::HostError>),
|
||||
Host(Box<host::HostError>),
|
||||
}
|
||||
|
||||
impl Error {
|
||||
|
@ -286,7 +285,7 @@ impl Error {
|
|||
/// [`Host`]: enum.Error.html#variant.Host
|
||||
/// [`Trap`]: enum.Error.html#variant.Trap
|
||||
/// [`TrapKind::Host`]: enum.TrapKind.html#variant.Host
|
||||
pub fn as_host_error(&self) -> Option<&dyn host::HostError> {
|
||||
pub fn as_host_error(&self) -> Option<&host::HostError> {
|
||||
match *self {
|
||||
Error::Host(ref host_err) => Some(&**host_err),
|
||||
Error::Trap(ref trap) => match *trap.kind() {
|
||||
|
@ -397,6 +396,12 @@ mod table;
|
|||
mod types;
|
||||
mod value;
|
||||
|
||||
#[cfg(feature = "threadsafe")]
|
||||
mod threadsafe;
|
||||
|
||||
#[cfg(not(feature = "threadsafe"))]
|
||||
mod not_threadsafe;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
|
@ -411,6 +416,12 @@ pub use self::table::{TableInstance, TableRef};
|
|||
pub use self::types::{GlobalDescriptor, MemoryDescriptor, Signature, TableDescriptor, ValueType};
|
||||
pub use self::value::{Error as ValueError, FromRuntimeValue, LittleEndianConvert, RuntimeValue};
|
||||
|
||||
#[cfg(feature = "threadsafe")]
|
||||
pub use self::threadsafe::*;
|
||||
|
||||
#[cfg(not(feature = "threadsafe"))]
|
||||
pub use self::not_threadsafe::*;
|
||||
|
||||
/// WebAssembly-specific sizes and units.
|
||||
pub mod memory_units {
|
||||
pub use memory_units_crate::wasm32::*;
|
||||
|
|
|
@ -1,25 +1,11 @@
|
|||
use alloc::{rc::Rc, string::ToString, vec::Vec};
|
||||
use core::{
|
||||
cell::{Cell, RefCell},
|
||||
cmp, fmt,
|
||||
ops::Range,
|
||||
u32,
|
||||
};
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
use core::{cmp, fmt, ops::Range, u32};
|
||||
use memory_units::{Bytes, Pages, RoundUpTo};
|
||||
use parity_wasm::elements::ResizableLimits;
|
||||
use value::LittleEndianConvert;
|
||||
use Error;
|
||||
|
||||
#[cfg(all(unix, not(feature = "vec_memory")))]
|
||||
#[path = "mmap_bytebuf.rs"]
|
||||
mod bytebuf;
|
||||
|
||||
#[cfg(any(not(unix), feature = "vec_memory"))]
|
||||
#[path = "vec_bytebuf.rs"]
|
||||
mod bytebuf;
|
||||
|
||||
use self::bytebuf::ByteBuf;
|
||||
|
||||
/// Size of a page of [linear memory][`MemoryInstance`] - 64KiB.
|
||||
///
|
||||
/// The size of a memory is always a integer multiple of a page size.
|
||||
|
@ -34,7 +20,7 @@ pub const LINEAR_MEMORY_PAGE_SIZE: Bytes = Bytes(65536);
|
|||
/// [`MemoryInstance`]: struct.MemoryInstance.html
|
||||
///
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MemoryRef(Rc<MemoryInstance>);
|
||||
pub struct MemoryRef(::MyRc<MemoryInstance>);
|
||||
|
||||
impl ::core::ops::Deref for MemoryRef {
|
||||
type Target = MemoryInstance;
|
||||
|
@ -60,10 +46,11 @@ pub struct MemoryInstance {
|
|||
/// Memory limits.
|
||||
limits: ResizableLimits,
|
||||
/// Linear memory buffer with lazy allocation.
|
||||
buffer: RefCell<ByteBuf>,
|
||||
buffer: ::MyRefCell<Vec<u8>>,
|
||||
initial: Pages,
|
||||
current_size: Cell<usize>,
|
||||
current_size: ::MyCell<usize>,
|
||||
maximum: Option<Pages>,
|
||||
lowest_used: ::MyCell<u32>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for MemoryInstance {
|
||||
|
@ -133,24 +120,23 @@ impl MemoryInstance {
|
|||
validation::validate_memory(initial_u32, maximum_u32).map_err(Error::Memory)?;
|
||||
}
|
||||
|
||||
let memory = MemoryInstance::new(initial, maximum)?;
|
||||
Ok(MemoryRef(Rc::new(memory)))
|
||||
let memory = MemoryInstance::new(initial, maximum);
|
||||
Ok(MemoryRef(::MyRc::new(memory)))
|
||||
}
|
||||
|
||||
/// Create new linear memory instance.
|
||||
fn new(initial: Pages, maximum: Option<Pages>) -> Result<Self, Error> {
|
||||
fn new(initial: Pages, maximum: Option<Pages>) -> Self {
|
||||
let limits = ResizableLimits::new(initial.0 as u32, maximum.map(|p| p.0 as u32));
|
||||
|
||||
let initial_size: Bytes = initial.into();
|
||||
Ok(MemoryInstance {
|
||||
MemoryInstance {
|
||||
limits: limits,
|
||||
buffer: RefCell::new(
|
||||
ByteBuf::new(initial_size.0).map_err(|err| Error::Memory(err.to_string()))?,
|
||||
),
|
||||
buffer: ::MyRefCell::new(Vec::with_capacity(4096)),
|
||||
initial: initial,
|
||||
current_size: Cell::new(initial_size.0),
|
||||
current_size: ::MyCell::new(initial_size.0),
|
||||
maximum: maximum,
|
||||
})
|
||||
lowest_used: ::MyCell::new(u32::max_value()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return linear memory limits.
|
||||
|
@ -171,6 +157,16 @@ impl MemoryInstance {
|
|||
self.maximum
|
||||
}
|
||||
|
||||
/// Returns lowest offset ever written or `u32::max_value()` if none.
|
||||
pub fn lowest_used(&self) -> u32 {
|
||||
self.lowest_used.get()
|
||||
}
|
||||
|
||||
/// Resets tracked lowest offset.
|
||||
pub fn reset_lowest_used(&self, addr: u32) {
|
||||
self.lowest_used.set(addr)
|
||||
}
|
||||
|
||||
/// Returns current linear memory size.
|
||||
///
|
||||
/// Maximum memory size cannot exceed `65536` pages or 4GiB.
|
||||
|
@ -191,18 +187,21 @@ impl MemoryInstance {
|
|||
/// );
|
||||
/// ```
|
||||
pub fn current_size(&self) -> Pages {
|
||||
Bytes(self.buffer.borrow().len()).round_up_to()
|
||||
Bytes(self.current_size.get()).round_up_to()
|
||||
}
|
||||
|
||||
/// Returns current used memory size in bytes.
|
||||
/// This is one more than the highest memory address that had been written to.
|
||||
pub fn used_size(&self) -> Bytes {
|
||||
Bytes(self.buffer.borrow().len())
|
||||
}
|
||||
|
||||
/// Get value from memory at given offset.
|
||||
pub fn get_value<T: LittleEndianConvert>(&self, offset: u32) -> Result<T, Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let region =
|
||||
self.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?;
|
||||
Ok(
|
||||
T::from_little_endian(&buffer.as_slice_mut()[region.range()])
|
||||
.expect("Slice size is checked"),
|
||||
)
|
||||
let region = self.checked_region(offset as usize, ::core::mem::size_of::<T>())?;
|
||||
|
||||
let buffer = self.buffer.borrow();
|
||||
Ok(T::from_little_endian(&buffer[region.range()]).expect("Slice size is checked"))
|
||||
}
|
||||
|
||||
/// Copy data from memory at given offset.
|
||||
|
@ -212,10 +211,10 @@ impl MemoryInstance {
|
|||
///
|
||||
/// [`get_into`]: #method.get_into
|
||||
pub fn get(&self, offset: u32, size: usize) -> Result<Vec<u8>, Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let region = self.checked_region(&mut buffer, offset as usize, size)?;
|
||||
let region = self.checked_region(offset as usize, size)?;
|
||||
|
||||
Ok(buffer.as_slice_mut()[region.range()].to_vec())
|
||||
let buffer = self.buffer.borrow();
|
||||
Ok(buffer[region.range()].to_vec())
|
||||
}
|
||||
|
||||
/// Copy data from given offset in the memory into `target` slice.
|
||||
|
@ -224,33 +223,37 @@ impl MemoryInstance {
|
|||
///
|
||||
/// Returns `Err` if the specified region is out of bounds.
|
||||
pub fn get_into(&self, offset: u32, target: &mut [u8]) -> Result<(), Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let region = self.checked_region(&mut buffer, offset as usize, target.len())?;
|
||||
let region = self.checked_region(offset as usize, target.len())?;
|
||||
|
||||
target.copy_from_slice(&buffer.as_slice_mut()[region.range()]);
|
||||
let buffer = self.buffer.borrow();
|
||||
target.copy_from_slice(&buffer[region.range()]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Copy data in the memory at given offset.
|
||||
pub fn set(&self, offset: u32, value: &[u8]) -> Result<(), Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let range = self
|
||||
.checked_region(&mut buffer, offset as usize, value.len())?
|
||||
.range();
|
||||
let range = self.checked_region(offset as usize, value.len())?.range();
|
||||
|
||||
buffer.as_slice_mut()[range].copy_from_slice(value);
|
||||
if offset < self.lowest_used.get() {
|
||||
self.lowest_used.set(offset);
|
||||
}
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
buffer[range].copy_from_slice(value);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Copy value in the memory at given offset.
|
||||
pub fn set_value<T: LittleEndianConvert>(&self, offset: u32, value: T) -> Result<(), Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let range = self
|
||||
.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?
|
||||
.checked_region(offset as usize, ::core::mem::size_of::<T>())?
|
||||
.range();
|
||||
value.into_little_endian(&mut buffer.as_slice_mut()[range]);
|
||||
if offset < self.lowest_used.get() {
|
||||
self.lowest_used.set(offset);
|
||||
}
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
value.into_little_endian(&mut buffer[range]);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -284,22 +287,12 @@ impl MemoryInstance {
|
|||
}
|
||||
|
||||
let new_buffer_length: Bytes = new_size.into();
|
||||
self.buffer
|
||||
.borrow_mut()
|
||||
.realloc(new_buffer_length.0)
|
||||
.map_err(|err| Error::Memory(err.to_string()))?;
|
||||
|
||||
self.current_size.set(new_buffer_length.0);
|
||||
|
||||
Ok(size_before_grow)
|
||||
}
|
||||
|
||||
fn checked_region(
|
||||
&self,
|
||||
buffer: &mut ByteBuf,
|
||||
offset: usize,
|
||||
size: usize,
|
||||
) -> Result<CheckedRegion, Error> {
|
||||
fn checked_region(&self, offset: usize, size: usize) -> Result<CheckedRegion, Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let end = offset.checked_add(size).ok_or_else(|| {
|
||||
Error::Memory(format!(
|
||||
"trying to access memory block of size {} from offset {}",
|
||||
|
@ -307,6 +300,10 @@ impl MemoryInstance {
|
|||
))
|
||||
})?;
|
||||
|
||||
if end <= self.current_size.get() && buffer.len() < end {
|
||||
buffer.resize(end, 0);
|
||||
}
|
||||
|
||||
if end > buffer.len() {
|
||||
return Err(Error::Memory(format!(
|
||||
"trying to access region [{}..{}] in memory [0..{}]",
|
||||
|
@ -324,7 +321,6 @@ impl MemoryInstance {
|
|||
|
||||
fn checked_region_pair(
|
||||
&self,
|
||||
buffer: &mut ByteBuf,
|
||||
offset1: usize,
|
||||
size1: usize,
|
||||
offset2: usize,
|
||||
|
@ -344,6 +340,12 @@ impl MemoryInstance {
|
|||
))
|
||||
})?;
|
||||
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
let max = cmp::max(end1, end2);
|
||||
if max <= self.current_size.get() && buffer.len() < max {
|
||||
buffer.resize(max, 0);
|
||||
}
|
||||
|
||||
if end1 > buffer.len() {
|
||||
return Err(Error::Memory(format!(
|
||||
"trying to access region [{}..{}] in memory [0..{}]",
|
||||
|
@ -382,15 +384,18 @@ impl MemoryInstance {
|
|||
///
|
||||
/// Returns `Err` if either of specified regions is out of bounds.
|
||||
pub fn copy(&self, src_offset: usize, dst_offset: usize, len: usize) -> Result<(), Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
|
||||
let (read_region, write_region) =
|
||||
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
|
||||
self.checked_region_pair(src_offset, len, dst_offset, len)?;
|
||||
|
||||
if dst_offset < self.lowest_used.get() as usize {
|
||||
self.lowest_used.set(dst_offset as u32);
|
||||
}
|
||||
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
unsafe {
|
||||
::core::ptr::copy(
|
||||
buffer.as_slice()[read_region.range()].as_ptr(),
|
||||
buffer.as_slice_mut()[write_region.range()].as_mut_ptr(),
|
||||
buffer[read_region.range()].as_ptr(),
|
||||
buffer[write_region.range()].as_mut_ptr(),
|
||||
len,
|
||||
)
|
||||
}
|
||||
|
@ -415,10 +420,8 @@ impl MemoryInstance {
|
|||
dst_offset: usize,
|
||||
len: usize,
|
||||
) -> Result<(), Error> {
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
|
||||
let (read_region, write_region) =
|
||||
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
|
||||
self.checked_region_pair(src_offset, len, dst_offset, len)?;
|
||||
|
||||
if read_region.intersects(&write_region) {
|
||||
return Err(Error::Memory(format!(
|
||||
|
@ -426,10 +429,15 @@ impl MemoryInstance {
|
|||
)));
|
||||
}
|
||||
|
||||
if dst_offset < self.lowest_used.get() as usize {
|
||||
self.lowest_used.set(dst_offset as u32);
|
||||
}
|
||||
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
unsafe {
|
||||
::core::ptr::copy_nonoverlapping(
|
||||
buffer.as_slice()[read_region.range()].as_ptr(),
|
||||
buffer.as_slice_mut()[write_region.range()].as_mut_ptr(),
|
||||
buffer[read_region.range()].as_ptr(),
|
||||
buffer[write_region.range()].as_mut_ptr(),
|
||||
len,
|
||||
)
|
||||
}
|
||||
|
@ -447,25 +455,22 @@ impl MemoryInstance {
|
|||
dst_offset: usize,
|
||||
len: usize,
|
||||
) -> Result<(), Error> {
|
||||
if Rc::ptr_eq(&src.0, &dst.0) {
|
||||
if ::MyRc::ptr_eq(&src.0, &dst.0) {
|
||||
// `transfer` is invoked with with same source and destination. Let's assume that regions may
|
||||
// overlap and use `copy`.
|
||||
return src.copy(src_offset, dst_offset, len);
|
||||
}
|
||||
|
||||
// Because memory references point to different memory instances, it is safe to `borrow_mut`
|
||||
// both buffers at once (modulo `with_direct_access_mut`).
|
||||
let mut src_buffer = src.buffer.borrow_mut();
|
||||
let src_range = src.checked_region(src_offset, len)?.range();
|
||||
let dst_range = dst.checked_region(dst_offset, len)?.range();
|
||||
|
||||
if dst_offset < dst.lowest_used.get() as usize {
|
||||
dst.lowest_used.set(dst_offset as u32);
|
||||
}
|
||||
|
||||
let mut dst_buffer = dst.buffer.borrow_mut();
|
||||
|
||||
let src_range = src
|
||||
.checked_region(&mut src_buffer, src_offset, len)?
|
||||
.range();
|
||||
let dst_range = dst
|
||||
.checked_region(&mut dst_buffer, dst_offset, len)?
|
||||
.range();
|
||||
|
||||
dst_buffer.as_slice_mut()[dst_range].copy_from_slice(&src_buffer.as_slice()[src_range]);
|
||||
let src_buffer = src.buffer.borrow();
|
||||
dst_buffer[dst_range].copy_from_slice(&src_buffer[src_range]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -478,11 +483,14 @@ impl MemoryInstance {
|
|||
///
|
||||
/// Returns `Err` if the specified region is out of bounds.
|
||||
pub fn clear(&self, offset: usize, new_val: u8, len: usize) -> Result<(), Error> {
|
||||
let range = self.checked_region(offset, len)?.range();
|
||||
|
||||
if offset < self.lowest_used.get() as usize {
|
||||
self.lowest_used.set(offset as u32);
|
||||
}
|
||||
|
||||
let mut buffer = self.buffer.borrow_mut();
|
||||
|
||||
let range = self.checked_region(&mut buffer, offset, len)?.range();
|
||||
|
||||
for val in &mut buffer.as_slice_mut()[range] {
|
||||
for val in &mut buffer[range] {
|
||||
*val = new_val
|
||||
}
|
||||
Ok(())
|
||||
|
@ -497,28 +505,18 @@ impl MemoryInstance {
|
|||
self.clear(offset, 0, len)
|
||||
}
|
||||
|
||||
/// Set every byte in the entire linear memory to 0, preserving its size.
|
||||
///
|
||||
/// Might be useful for some optimization shenanigans.
|
||||
pub fn erase(&self) -> Result<(), Error> {
|
||||
self.buffer
|
||||
.borrow_mut()
|
||||
.erase()
|
||||
.map_err(|err| Error::Memory(err.to_string()))
|
||||
}
|
||||
|
||||
/// Provides direct access to the underlying memory buffer.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Any call that requires write access to memory (such as [`set`], [`clear`], etc) made within
|
||||
/// the closure will panic.
|
||||
/// the closure will panic. Note that the buffer size may be arbitraty. Proceed with caution.
|
||||
///
|
||||
/// [`set`]: #method.get
|
||||
/// [`clear`]: #method.set
|
||||
pub fn with_direct_access<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
|
||||
let buf = self.buffer.borrow();
|
||||
f(buf.as_slice())
|
||||
f(&*buf)
|
||||
}
|
||||
|
||||
/// Provides direct mutable access to the underlying memory buffer.
|
||||
|
@ -526,13 +524,15 @@ impl MemoryInstance {
|
|||
/// # Panics
|
||||
///
|
||||
/// Any calls that requires either read or write access to memory (such as [`get`], [`set`], [`copy`], etc) made
|
||||
/// within the closure will panic. Proceed with caution.
|
||||
/// within the closure will panic. Note that the buffer size may be arbitraty.
|
||||
/// The closure may however resize it. Proceed with caution.
|
||||
///
|
||||
/// [`get`]: #method.get
|
||||
/// [`set`]: #method.set
|
||||
pub fn with_direct_access_mut<R, F: FnOnce(&mut [u8]) -> R>(&self, f: F) -> R {
|
||||
/// [`copy`]: #method.copy
|
||||
pub fn with_direct_access_mut<R, F: FnOnce(&mut Vec<u8>) -> R>(&self, f: F) -> R {
|
||||
let mut buf = self.buffer.borrow_mut();
|
||||
f(buf.as_slice_mut())
|
||||
f(&mut buf)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -541,12 +541,26 @@ mod tests {
|
|||
|
||||
use super::{MemoryInstance, MemoryRef, LINEAR_MEMORY_PAGE_SIZE};
|
||||
use memory_units::Pages;
|
||||
use std::rc::Rc;
|
||||
use Error;
|
||||
|
||||
#[test]
|
||||
fn alloc() {
|
||||
let mut fixtures = vec![
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
let fixtures = &[
|
||||
(0, None, true),
|
||||
(0, Some(0), true),
|
||||
(1, None, true),
|
||||
(1, Some(1), true),
|
||||
(0, Some(1), true),
|
||||
(1, Some(0), false),
|
||||
(0, Some(65536), true),
|
||||
(65536, Some(65536), true),
|
||||
(65536, Some(0), false),
|
||||
(65536, None, true),
|
||||
];
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
let fixtures = &[
|
||||
(0, None, true),
|
||||
(0, Some(0), true),
|
||||
(1, None, true),
|
||||
|
@ -555,13 +569,6 @@ mod tests {
|
|||
(1, Some(0), false),
|
||||
];
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
fixtures.extend(&[
|
||||
(65536, Some(65536), true),
|
||||
(65536, Some(0), false),
|
||||
(65536, None, true),
|
||||
]);
|
||||
|
||||
for (index, &(initial, maybe_max, expected_ok)) in fixtures.iter().enumerate() {
|
||||
let initial: Pages = Pages(initial);
|
||||
let maximum: Option<Pages> = maybe_max.map(|m| Pages(m));
|
||||
|
@ -582,7 +589,7 @@ mod tests {
|
|||
}
|
||||
|
||||
fn create_memory(initial_content: &[u8]) -> MemoryInstance {
|
||||
let mem = MemoryInstance::new(Pages(1), Some(Pages(1))).unwrap();
|
||||
let mem = MemoryInstance::new(Pages(1), Some(Pages(1)));
|
||||
mem.set(0, initial_content)
|
||||
.expect("Successful initialize the memory");
|
||||
mem
|
||||
|
@ -635,8 +642,8 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn transfer_works() {
|
||||
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
let dst = MemoryRef(Rc::new(create_memory(&[
|
||||
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
let dst = MemoryRef(::MyRc::new(create_memory(&[
|
||||
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
|
||||
])));
|
||||
|
||||
|
@ -651,7 +658,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn transfer_still_works_with_same_memory() {
|
||||
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
|
||||
MemoryInstance::transfer(&src, 4, &src, 0, 3).unwrap();
|
||||
|
||||
|
@ -660,7 +667,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn transfer_oob_with_same_memory_errors() {
|
||||
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
assert!(MemoryInstance::transfer(&src, 65535, &src, 0, 3).is_err());
|
||||
|
||||
// Check that memories content left untouched
|
||||
|
@ -669,8 +676,8 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn transfer_oob_errors() {
|
||||
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
let dst = MemoryRef(Rc::new(create_memory(&[
|
||||
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
|
||||
let dst = MemoryRef(::MyRc::new(create_memory(&[
|
||||
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
|
||||
])));
|
||||
|
||||
|
@ -695,7 +702,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn get_into() {
|
||||
let mem = MemoryInstance::new(Pages(1), None).unwrap();
|
||||
let mem = MemoryInstance::new(Pages(1), None);
|
||||
mem.set(6, &[13, 17, 129])
|
||||
.expect("memory set should not fail");
|
||||
|
||||
|
@ -711,23 +718,17 @@ mod tests {
|
|||
let mem = MemoryInstance::alloc(Pages(1), None).unwrap();
|
||||
mem.set(100, &[0]).expect("memory set should not fail");
|
||||
mem.with_direct_access_mut(|buf| {
|
||||
assert_eq!(
|
||||
buf.len(),
|
||||
65536,
|
||||
"the buffer length is expected to be 1 page long"
|
||||
);
|
||||
assert_eq!(buf.len(), 101);
|
||||
buf[..10].copy_from_slice(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
|
||||
});
|
||||
mem.with_direct_access(|buf| {
|
||||
assert_eq!(
|
||||
buf.len(),
|
||||
65536,
|
||||
"the buffer length is expected to be 1 page long"
|
||||
);
|
||||
assert_eq!(buf.len(), 101);
|
||||
assert_eq!(&buf[..10], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
|
||||
});
|
||||
}
|
||||
|
||||
// this test works only in the non-thread-safe variant, it deadlocks otherwise.
|
||||
#[cfg(not(feature = "threadsafe"))]
|
||||
#[should_panic]
|
||||
#[test]
|
||||
fn zero_copy_panics_on_nested_access() {
|
|
@ -1,189 +0,0 @@
|
|||
//! An implementation of a `ByteBuf` based on virtual memory.
|
||||
//!
|
||||
//! This implementation uses `mmap` on POSIX systems (and should use `VirtualAlloc` on windows).
|
||||
//! There are possibilities to improve the performance for the reallocating case by reserving
|
||||
//! memory up to maximum. This might be a problem for systems that don't have a lot of virtual
|
||||
//! memory (i.e. 32-bit platforms).
|
||||
|
||||
use std::ptr::{self, NonNull};
|
||||
use std::slice;
|
||||
|
||||
struct Mmap {
|
||||
/// The pointer that points to the start of the mapping.
|
||||
///
|
||||
/// This value doesn't change after creation.
|
||||
ptr: NonNull<u8>,
|
||||
/// The length of this mapping.
|
||||
///
|
||||
/// Cannot be more than `isize::max_value()`. This value doesn't change after creation.
|
||||
len: usize,
|
||||
}
|
||||
|
||||
impl Mmap {
|
||||
/// Create a new mmap mapping
|
||||
///
|
||||
/// Returns `Err` if:
|
||||
/// - `len` should not exceed `isize::max_value()`
|
||||
/// - `len` should be greater than 0.
|
||||
/// - `mmap` returns an error (almost certainly means out of memory).
|
||||
fn new(len: usize) -> Result<Self, &'static str> {
|
||||
if len > isize::max_value() as usize {
|
||||
return Err("`len` should not exceed `isize::max_value()`");
|
||||
}
|
||||
if len == 0 {
|
||||
return Err("`len` should be greater than 0");
|
||||
}
|
||||
|
||||
let ptr_or_err = unsafe {
|
||||
// Safety Proof:
|
||||
// There are not specific safety proofs are required for this call, since the call
|
||||
// by itself can't invoke any safety problems (however, misusing its result can).
|
||||
libc::mmap(
|
||||
// `addr` - let the system to choose the address at which to create the mapping.
|
||||
ptr::null_mut(),
|
||||
// the length of the mapping in bytes.
|
||||
len,
|
||||
// `prot` - protection flags: READ WRITE !EXECUTE
|
||||
libc::PROT_READ | libc::PROT_WRITE,
|
||||
// `flags`
|
||||
// `MAP_ANON` - mapping is not backed by any file and initial contents are
|
||||
// initialized to zero.
|
||||
// `MAP_PRIVATE` - the mapping is private to this process.
|
||||
libc::MAP_ANON | libc::MAP_PRIVATE,
|
||||
// `fildes` - a file descriptor. Pass -1 as this is required for some platforms
|
||||
// when the `MAP_ANON` is passed.
|
||||
-1,
|
||||
// `offset` - offset from the file.
|
||||
0,
|
||||
)
|
||||
};
|
||||
|
||||
match ptr_or_err {
|
||||
// With the current parameters, the error can only be returned in case of insufficient
|
||||
// memory.
|
||||
libc::MAP_FAILED => Err("mmap returned an error"),
|
||||
_ => {
|
||||
let ptr = NonNull::new(ptr_or_err as *mut u8).ok_or("mmap returned 0")?;
|
||||
Ok(Self { ptr, len })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn as_slice(&self) -> &[u8] {
|
||||
unsafe {
|
||||
// Safety Proof:
|
||||
// - Aliasing guarantees of `self.ptr` are not violated since `self` is the only owner.
|
||||
// - This pointer was allocated for `self.len` bytes and thus is a valid slice.
|
||||
// - `self.len` doesn't change throughout the lifetime of `self`.
|
||||
// - The value is returned valid for the duration of lifetime of `self`.
|
||||
// `self` cannot be destroyed while the returned slice is alive.
|
||||
// - `self.ptr` is of `NonNull` type and thus `.as_ptr()` can never return NULL.
|
||||
// - `self.len` cannot be larger than `isize::max_value()`.
|
||||
slice::from_raw_parts(self.ptr.as_ptr(), self.len)
|
||||
}
|
||||
}
|
||||
|
||||
fn as_slice_mut(&mut self) -> &mut [u8] {
|
||||
unsafe {
|
||||
// Safety Proof:
|
||||
// - See the proof for `Self::as_slice`
|
||||
// - Additionally, it is not possible to obtain two mutable references for `self.ptr`
|
||||
slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Mmap {
|
||||
fn drop(&mut self) {
|
||||
let ret_val = unsafe {
|
||||
// Safety proof:
|
||||
// - `self.ptr` was allocated by a call to `mmap`.
|
||||
// - `self.len` was saved at the same time and it doesn't change throughout the lifetime
|
||||
// of `self`.
|
||||
libc::munmap(self.ptr.as_ptr() as *mut libc::c_void, self.len)
|
||||
};
|
||||
|
||||
// There is no reason for `munmap` to fail to deallocate a private annonymous mapping
|
||||
// allocated by `mmap`.
|
||||
// However, for the cases when it actually fails prefer to fail, in order to not leak
|
||||
// and exhaust the virtual memory.
|
||||
assert_eq!(ret_val, 0, "munmap failed");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ByteBuf {
|
||||
mmap: Option<Mmap>,
|
||||
}
|
||||
|
||||
impl ByteBuf {
|
||||
pub fn new(len: usize) -> Result<Self, &'static str> {
|
||||
let mmap = if len == 0 {
|
||||
None
|
||||
} else {
|
||||
Some(Mmap::new(len)?)
|
||||
};
|
||||
Ok(Self { mmap })
|
||||
}
|
||||
|
||||
pub fn realloc(&mut self, new_len: usize) -> Result<(), &'static str> {
|
||||
let new_mmap = if new_len == 0 {
|
||||
None
|
||||
} else {
|
||||
let mut new_mmap = Mmap::new(new_len)?;
|
||||
if let Some(cur_mmap) = self.mmap.take() {
|
||||
let src = cur_mmap.as_slice();
|
||||
let dst = new_mmap.as_slice_mut();
|
||||
let amount = src.len().min(dst.len());
|
||||
dst[..amount].copy_from_slice(&src[..amount]);
|
||||
}
|
||||
Some(new_mmap)
|
||||
};
|
||||
|
||||
self.mmap = new_mmap;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.mmap.as_ref().map(|m| m.len).unwrap_or(0)
|
||||
}
|
||||
|
||||
pub fn as_slice(&self) -> &[u8] {
|
||||
self.mmap.as_ref().map(|m| m.as_slice()).unwrap_or(&[])
|
||||
}
|
||||
|
||||
pub fn as_slice_mut(&mut self) -> &mut [u8] {
|
||||
self.mmap
|
||||
.as_mut()
|
||||
.map(|m| m.as_slice_mut())
|
||||
.unwrap_or(&mut [])
|
||||
}
|
||||
|
||||
pub fn erase(&mut self) -> Result<(), &'static str> {
|
||||
let len = self.len();
|
||||
if len > 0 {
|
||||
// The order is important.
|
||||
//
|
||||
// 1. First we clear, and thus drop, the current mmap if any.
|
||||
// 2. And then we create a new one.
|
||||
//
|
||||
// Otherwise we double the peak memory consumption.
|
||||
self.mmap = None;
|
||||
self.mmap = Some(Mmap::new(len)?);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::ByteBuf;
|
||||
|
||||
const PAGE_SIZE: usize = 4096;
|
||||
|
||||
// This is not required since wasm memories can only grow but nice to have.
|
||||
#[test]
|
||||
fn byte_buf_shrink() {
|
||||
let mut byte_buf = ByteBuf::new(PAGE_SIZE * 3).unwrap();
|
||||
byte_buf.realloc(PAGE_SIZE * 2).unwrap();
|
||||
}
|
||||
}
|
|
@ -1,39 +0,0 @@
|
|||
//! An implementation of `ByteBuf` based on a plain `Vec`.
|
||||
|
||||
use alloc::vec::Vec;
|
||||
|
||||
pub struct ByteBuf {
|
||||
buf: Vec<u8>,
|
||||
}
|
||||
|
||||
impl ByteBuf {
|
||||
pub fn new(len: usize) -> Result<Self, &'static str> {
|
||||
let mut buf = Vec::new();
|
||||
buf.resize(len, 0u8);
|
||||
Ok(Self { buf })
|
||||
}
|
||||
|
||||
pub fn realloc(&mut self, new_len: usize) -> Result<(), &'static str> {
|
||||
self.buf.resize(new_len, 0u8);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.buf.len()
|
||||
}
|
||||
|
||||
pub fn as_slice(&self) -> &[u8] {
|
||||
self.buf.as_ref()
|
||||
}
|
||||
|
||||
pub fn as_slice_mut(&mut self) -> &mut [u8] {
|
||||
self.buf.as_mut()
|
||||
}
|
||||
|
||||
pub fn erase(&mut self) -> Result<(), &'static str> {
|
||||
for v in &mut self.buf {
|
||||
*v = 0;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,16 +1,10 @@
|
|||
use alloc::{
|
||||
borrow::ToOwned,
|
||||
rc::Rc,
|
||||
string::{String, ToString},
|
||||
vec::Vec,
|
||||
};
|
||||
use core::cell::RefCell;
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
use core::fmt;
|
||||
use Trap;
|
||||
|
||||
use alloc::collections::BTreeMap;
|
||||
|
||||
use core::cell::Ref;
|
||||
use func::{FuncBody, FuncInstance, FuncRef};
|
||||
use global::{GlobalInstance, GlobalRef};
|
||||
use host::Externals;
|
||||
|
@ -38,7 +32,7 @@ use {Error, MemoryInstance, Module, RuntimeValue, Signature, TableInstance};
|
|||
///
|
||||
/// [`ModuleInstance`]: struct.ModuleInstance.html
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ModuleRef(pub(crate) Rc<ModuleInstance>);
|
||||
pub struct ModuleRef(pub(crate) ::MyRc<ModuleInstance>);
|
||||
|
||||
impl ::core::ops::Deref for ModuleRef {
|
||||
type Target = ModuleInstance;
|
||||
|
@ -157,23 +151,23 @@ impl ExternVal {
|
|||
/// [`invoke_export`]: #method.invoke_export
|
||||
#[derive(Debug)]
|
||||
pub struct ModuleInstance {
|
||||
signatures: RefCell<Vec<Rc<Signature>>>,
|
||||
tables: RefCell<Vec<TableRef>>,
|
||||
funcs: RefCell<Vec<FuncRef>>,
|
||||
memories: RefCell<Vec<MemoryRef>>,
|
||||
globals: RefCell<Vec<GlobalRef>>,
|
||||
exports: RefCell<BTreeMap<String, ExternVal>>,
|
||||
signatures: ::MyRefCell<Vec<::MyRc<Signature>>>,
|
||||
tables: ::MyRefCell<Vec<TableRef>>,
|
||||
funcs: ::MyRefCell<Vec<FuncRef>>,
|
||||
memories: ::MyRefCell<Vec<MemoryRef>>,
|
||||
globals: ::MyRefCell<Vec<GlobalRef>>,
|
||||
exports: ::MyRefCell<BTreeMap<String, ExternVal>>,
|
||||
}
|
||||
|
||||
impl ModuleInstance {
|
||||
fn default() -> Self {
|
||||
ModuleInstance {
|
||||
funcs: RefCell::new(Vec::new()),
|
||||
signatures: RefCell::new(Vec::new()),
|
||||
tables: RefCell::new(Vec::new()),
|
||||
memories: RefCell::new(Vec::new()),
|
||||
globals: RefCell::new(Vec::new()),
|
||||
exports: RefCell::new(BTreeMap::new()),
|
||||
funcs: ::MyRefCell::new(Vec::new()),
|
||||
signatures: ::MyRefCell::new(Vec::new()),
|
||||
tables: ::MyRefCell::new(Vec::new()),
|
||||
memories: ::MyRefCell::new(Vec::new()),
|
||||
globals: ::MyRefCell::new(Vec::new()),
|
||||
exports: ::MyRefCell::new(BTreeMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -193,7 +187,7 @@ impl ModuleInstance {
|
|||
self.funcs.borrow().get(idx as usize).cloned()
|
||||
}
|
||||
|
||||
pub(crate) fn signature_by_index(&self, idx: u32) -> Option<Rc<Signature>> {
|
||||
pub(crate) fn signature_by_index(&self, idx: u32) -> Option<::MyRc<Signature>> {
|
||||
self.signatures.borrow().get(idx as usize).cloned()
|
||||
}
|
||||
|
||||
|
@ -201,7 +195,7 @@ impl ModuleInstance {
|
|||
self.funcs.borrow_mut().push(func);
|
||||
}
|
||||
|
||||
fn push_signature(&self, signature: Rc<Signature>) {
|
||||
fn push_signature(&self, signature: ::MyRc<Signature>) {
|
||||
self.signatures.borrow_mut().push(signature)
|
||||
}
|
||||
|
||||
|
@ -219,7 +213,7 @@ impl ModuleInstance {
|
|||
|
||||
/// Access all globals. This is a non-standard API so it's unlikely to be
|
||||
/// portable to other engines.
|
||||
pub fn globals<'a>(&self) -> Ref<Vec<GlobalRef>> {
|
||||
pub fn globals<'a>(&self) -> ::MyRefRead<Vec<GlobalRef>> {
|
||||
self.globals.borrow()
|
||||
}
|
||||
|
||||
|
@ -232,10 +226,10 @@ impl ModuleInstance {
|
|||
extern_vals: I,
|
||||
) -> Result<ModuleRef, Error> {
|
||||
let module = loaded_module.module();
|
||||
let instance = ModuleRef(Rc::new(ModuleInstance::default()));
|
||||
let instance = ModuleRef(::MyRc::new(ModuleInstance::default()));
|
||||
|
||||
for &Type::Function(ref ty) in module.type_section().map(|ts| ts.types()).unwrap_or(&[]) {
|
||||
let signature = Rc::new(Signature::from_elements(ty));
|
||||
let signature = ::MyRc::new(Signature::from_elements(ty));
|
||||
instance.push_signature(signature);
|
||||
}
|
||||
|
||||
|
@ -329,8 +323,11 @@ impl ModuleInstance {
|
|||
locals: body.locals().to_vec(),
|
||||
code: code,
|
||||
};
|
||||
let func_instance =
|
||||
FuncInstance::alloc_internal(Rc::downgrade(&instance.0), signature, func_body);
|
||||
let func_instance = FuncInstance::alloc_internal(
|
||||
::MyRc::downgrade(&instance.0),
|
||||
signature,
|
||||
func_body,
|
||||
);
|
||||
instance.push_func(func_instance);
|
||||
}
|
||||
}
|
||||
|
@ -421,11 +418,7 @@ impl ModuleInstance {
|
|||
.map(|es| es.entries())
|
||||
.unwrap_or(&[])
|
||||
{
|
||||
let offset = element_segment
|
||||
.offset()
|
||||
.as_ref()
|
||||
.expect("passive segments are rejected due to validation");
|
||||
let offset_val = match eval_init_expr(offset, &module_ref) {
|
||||
let offset_val = match eval_init_expr(element_segment.offset(), &module_ref) {
|
||||
RuntimeValue::I32(v) => v as u32,
|
||||
_ => panic!("Due to validation elem segment offset should evaluate to i32"),
|
||||
};
|
||||
|
@ -454,11 +447,7 @@ impl ModuleInstance {
|
|||
}
|
||||
|
||||
for data_segment in module.data_section().map(|ds| ds.entries()).unwrap_or(&[]) {
|
||||
let offset = data_segment
|
||||
.offset()
|
||||
.as_ref()
|
||||
.expect("passive segments are rejected due to validation");
|
||||
let offset_val = match eval_init_expr(offset, &module_ref) {
|
||||
let offset_val = match eval_init_expr(data_segment.offset(), &module_ref) {
|
||||
RuntimeValue::I32(v) => v as u32,
|
||||
_ => panic!("Due to validation data segment offset should evaluate to i32"),
|
||||
};
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
pub use alloc::rc::Rc as MyRc;
|
||||
pub use alloc::rc::Weak as MyWeak;
|
||||
pub use core::cell::Cell as MyCell;
|
||||
pub use core::cell::Ref as MyRefRead;
|
||||
pub use core::cell::Ref as MyRefWrite;
|
||||
pub use core::cell::RefCell as MyRefCell;
|
|
@ -1,4 +1,5 @@
|
|||
use alloc::{string::String, vec::Vec};
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
|
||||
use parity_wasm::elements::{BlockType, FuncBody, Instruction};
|
||||
|
||||
|
@ -251,14 +252,13 @@ impl Compiler {
|
|||
);
|
||||
self.sink.emit_br_nez(target);
|
||||
}
|
||||
BrTable(ref br_table_data) => {
|
||||
BrTable(ref table, default) => {
|
||||
// At this point, the condition value is at the top of the stack.
|
||||
// But at the point of actual jump the condition will already be
|
||||
// popped off.
|
||||
let value_stack_height = context.value_stack.len().saturating_sub(1);
|
||||
|
||||
let targets = br_table_data
|
||||
.table
|
||||
let targets = table
|
||||
.iter()
|
||||
.map(|depth| {
|
||||
require_target(
|
||||
|
@ -270,7 +270,7 @@ impl Compiler {
|
|||
})
|
||||
.collect::<Result<Vec<_>, _>>();
|
||||
let default_target = require_target(
|
||||
br_table_data.default,
|
||||
default,
|
||||
value_stack_height,
|
||||
&context.frame_stack,
|
||||
&self.label_stack,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use alloc::vec::Vec;
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
|
||||
use crate::{
|
||||
isa,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use alloc::{boxed::Box, vec::Vec};
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
use core::fmt;
|
||||
use core::ops;
|
||||
use core::{u32, usize};
|
||||
|
@ -1289,8 +1290,14 @@ impl FunctionContext {
|
|||
debug_assert!(!self.is_initialized);
|
||||
|
||||
let num_locals = locals.iter().map(|l| l.count() as usize).sum();
|
||||
let locals = vec![Default::default(); num_locals];
|
||||
|
||||
value_stack.extend(num_locals)?;
|
||||
// TODO: Replace with extend.
|
||||
for local in locals {
|
||||
value_stack
|
||||
.push(local)
|
||||
.map_err(|_| TrapKind::StackOverflow)?;
|
||||
}
|
||||
|
||||
self.is_initialized = true;
|
||||
Ok(())
|
||||
|
@ -1436,18 +1443,6 @@ impl ValueStack {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn extend(&mut self, len: usize) -> Result<(), TrapKind> {
|
||||
let cells = self
|
||||
.buf
|
||||
.get_mut(self.sp..self.sp + len)
|
||||
.ok_or_else(|| TrapKind::StackOverflow)?;
|
||||
for cell in cells {
|
||||
*cell = Default::default();
|
||||
}
|
||||
self.sp += len;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn len(&self) -> usize {
|
||||
self.sp
|
||||
|
@ -1498,22 +1493,6 @@ impl StackRecycler {
|
|||
}
|
||||
}
|
||||
|
||||
/// Clears any values left on the stack to avoid
|
||||
/// leaking them to future export invocations.
|
||||
///
|
||||
/// This is a secondary defense to prevent modules from
|
||||
/// exploiting faulty stack handling in the interpreter.
|
||||
///
|
||||
/// Do note that there are additional channels that
|
||||
/// can leak information into an untrusted module.
|
||||
pub fn clear(&mut self) {
|
||||
if let Some(buf) = &mut self.value_stack_buf {
|
||||
for cell in buf.iter_mut() {
|
||||
*cell = RuntimeValueInternal(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn recreate_value_stack(this: &mut Option<&mut Self>) -> ValueStack {
|
||||
let limit = this
|
||||
.as_ref()
|
||||
|
@ -1547,6 +1526,10 @@ impl StackRecycler {
|
|||
}
|
||||
|
||||
pub(crate) fn recycle(&mut self, mut interpreter: Interpreter) {
|
||||
for cell in interpreter.value_stack.buf.iter_mut() {
|
||||
*cell = RuntimeValueInternal(0);
|
||||
}
|
||||
|
||||
interpreter.call_stack.buf.clear();
|
||||
|
||||
self.value_stack_buf = Some(interpreter.value_stack.buf);
|
||||
|
|
12
src/table.rs
12
src/table.rs
|
@ -1,5 +1,5 @@
|
|||
use alloc::{rc::Rc, vec::Vec};
|
||||
use core::cell::RefCell;
|
||||
#[allow(unused_imports)]
|
||||
use alloc::prelude::v1::*;
|
||||
use core::fmt;
|
||||
use core::u32;
|
||||
use func::FuncRef;
|
||||
|
@ -14,7 +14,7 @@ use Error;
|
|||
/// [`TableInstance`]: struct.TableInstance.html
|
||||
///
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TableRef(Rc<TableInstance>);
|
||||
pub struct TableRef(::MyRc<TableInstance>);
|
||||
|
||||
impl ::core::ops::Deref for TableRef {
|
||||
type Target = TableInstance;
|
||||
|
@ -40,7 +40,7 @@ pub struct TableInstance {
|
|||
/// Table limits.
|
||||
limits: ResizableLimits,
|
||||
/// Table memory buffer.
|
||||
buffer: RefCell<Vec<Option<FuncRef>>>,
|
||||
buffer: ::MyRefCell<Vec<Option<FuncRef>>>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for TableInstance {
|
||||
|
@ -65,13 +65,13 @@ impl TableInstance {
|
|||
/// Returns `Err` if `initial_size` is greater than `maximum_size`.
|
||||
pub fn alloc(initial_size: u32, maximum_size: Option<u32>) -> Result<TableRef, Error> {
|
||||
let table = TableInstance::new(ResizableLimits::new(initial_size, maximum_size))?;
|
||||
Ok(TableRef(Rc::new(table)))
|
||||
Ok(TableRef(::MyRc::new(table)))
|
||||
}
|
||||
|
||||
fn new(limits: ResizableLimits) -> Result<TableInstance, Error> {
|
||||
check_limits(&limits)?;
|
||||
Ok(TableInstance {
|
||||
buffer: RefCell::new(vec![None; limits.initial() as usize]),
|
||||
buffer: ::MyRefCell::new(vec![None; limits.initial() as usize]),
|
||||
limits: limits,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -285,7 +285,7 @@ fn resume_call_host_func() {
|
|||
let export = instance.export_by_name("test").unwrap();
|
||||
let func_instance = export.as_func().unwrap();
|
||||
|
||||
let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[][..]).unwrap();
|
||||
let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[]).unwrap();
|
||||
let result = invocation.start_execution(&mut env);
|
||||
match result {
|
||||
Err(ResumableError::Trap(_)) => {}
|
||||
|
@ -330,7 +330,7 @@ fn resume_call_host_func_type_mismatch() {
|
|||
let export = instance.export_by_name("test").unwrap();
|
||||
let func_instance = export.as_func().unwrap();
|
||||
|
||||
let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[][..]).unwrap();
|
||||
let mut invocation = FuncInstance::invoke_resumable(&func_instance, &[]).unwrap();
|
||||
let result = invocation.start_execution(&mut env);
|
||||
match result {
|
||||
Err(ResumableError::Trap(_)) => {}
|
||||
|
|
|
@ -0,0 +1,59 @@
extern crate atomic;

use alloc::sync::{Arc, RwLock};

pub use self::atomic::{Atomic, Ordering::Relaxed as Ordering};
pub use alloc::sync::{
    Arc as MyRc, RwLockReadGuard as MyRefRead, RwLockWriteGuard as MyRefWrite, Weak as MyWeak,
};

/// Thread-safe wrapper which can be used in place of a `RefCell`.
#[derive(Debug)]
pub struct MyRefCell<T>(Arc<RwLock<T>>);

impl<T> MyRefCell<T> {
    /// Create new wrapper object.
    pub fn new(obj: T) -> MyRefCell<T> {
        MyRefCell(Arc::new(RwLock::new(obj)))
    }

    /// Borrow a `MyRef` to the inner value.
    pub fn borrow(&self) -> ::MyRefRead<T> {
        self.0
            .read()
            .expect("failed to acquire lock while trying to borrow")
    }

    /// Borrow a mutable `MyRef` to the inner value.
    pub fn borrow_mut(&self) -> ::MyRefWrite<T> {
        self.0
            .write()
            .expect("failed to acquire lock while trying to borrow mutably")
    }
}

/// Thread-safe wrapper which can be used in place of a `Cell`.
#[derive(Debug)]
pub struct MyCell<T>(Atomic<T>)
where
    T: Copy;

impl<T> MyCell<T>
where
    T: Copy,
{
    /// Create new wrapper object.
    pub fn new(obj: T) -> MyCell<T> {
        MyCell(Atomic::new(obj))
    }

    /// Returns the inner value.
    pub fn get(&self) -> T {
        self.0.load(::Ordering)
    }

    /// Sets the inner value.
    pub fn set(&self, val: T) {
        self.0.store(val, ::Ordering);
    }
}
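The new module above backs RefCell-style interior mutability with an RwLock and Cell-style storage with an atomic, so the same call sites stay Sync when the threadsafe feature is enabled. A minimal standalone sketch of the same idea, using plain std::sync types instead of the crate's alloc aliases (the names here are mine, not the crate's):

    use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard};
    use std::sync::atomic::{AtomicU32, Ordering};

    // RefCell-like wrapper whose borrows go through a lock, so it is Sync.
    struct SharedCell<T>(Arc<RwLock<T>>);

    impl<T> SharedCell<T> {
        fn new(obj: T) -> Self {
            SharedCell(Arc::new(RwLock::new(obj)))
        }
        fn borrow(&self) -> RwLockReadGuard<'_, T> {
            self.0.read().expect("lock poisoned")
        }
        fn borrow_mut(&self) -> RwLockWriteGuard<'_, T> {
            self.0.write().expect("lock poisoned")
        }
    }

    fn main() {
        // Stands in for a table/memory buffer guarded by MyRefCell.
        let buffer = SharedCell::new(vec![0u32; 4]);
        buffer.borrow_mut()[0] = 7;
        assert_eq!(buffer.borrow()[0], 7);

        // Stands in for MyCell: a Copy value behind an atomic instead of a Cell.
        let pages = AtomicU32::new(1);
        pages.store(2, Ordering::Relaxed);
        assert_eq!(pages.load(Ordering::Relaxed), 2);
    }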
27 src/value.rs

@ -368,7 +368,6 @@ impl WrapInto<F32> for F64 {
macro_rules! impl_try_truncate_into {
    (@primitive $from: ident, $into: ident, $to_primitive:path) => {
        impl TryTruncateInto<$into, TrapKind> for $from {
            #[cfg(feature = "std")]
            fn try_truncate_into(self) -> Result<$into, TrapKind> {
                // Casting from a float to an integer will round the float towards zero
                num_rational::BigRational::from_float(self)

@ -376,23 +375,6 @@ macro_rules! impl_try_truncate_into {
                    .and_then(|val| $to_primitive(&val))
                    .ok_or(TrapKind::InvalidConversionToInt)
            }
            #[cfg(not(feature = "std"))]
            fn try_truncate_into(self) -> Result<$into, TrapKind> {
                // Casting from a float to an integer will round the float towards zero
                // NOTE: currently this will cause Undefined Behavior if the rounded value cannot be represented by the
                // target integer type. This includes Inf and NaN. This is a bug and will be fixed.
                if self.is_nan() || self.is_infinite() {
                    return Err(TrapKind::InvalidConversionToInt);
                }

                // range check
                let result = self as $into;
                if result as $from != self.trunc() {
                    return Err(TrapKind::InvalidConversionToInt);
                }

                Ok(self as $into)
            }
        }
    };
    (@wrapped $from:ident, $intermediate:ident, $into:ident) => {
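The no_std arm above rejects NaN and infinity up front and then checks that the cast value round-trips back to the truncated float, while the std arm reaches the same answer via num-rational; the NOTE about undefined behavior refers to out-of-range float-to-int `as` casts in older compilers, which saturate since Rust 1.45. A rough standalone sketch of that round-trip check for one concrete pair of types (my own helper name, not the macro):

    fn try_truncate_f64_to_i32(x: f64) -> Result<i32, &'static str> {
        // Reject values that cannot be meaningfully truncated at all.
        if x.is_nan() || x.is_infinite() {
            return Err("invalid conversion to int");
        }
        // `as` casts from float to int saturate in current Rust; if the value was
        // out of range, casting back will not reproduce the truncated input.
        let result = x as i32;
        if result as f64 != x.trunc() {
            return Err("invalid conversion to int");
        }
        Ok(result)
    }

    fn main() {
        assert_eq!(try_truncate_f64_to_i32(3.9), Ok(3));
        assert!(try_truncate_f64_to_i32(f64::NAN).is_err());
        assert!(try_truncate_f64_to_i32(3.0e12).is_err());
    }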
@ -837,6 +819,15 @@ impl_integer!(u32);
impl_integer!(i64);
impl_integer!(u64);

// Use std float functions in std environment.
// And libm's implementation in no_std
#[cfg(feature = "std")]
macro_rules! call_math {
    ($op:ident, $e:expr, $fXX:ident, $FXXExt:ident) => {
        $fXX::$op($e)
    };
}
#[cfg(not(feature = "std"))]
macro_rules! call_math {
    ($op:ident, $e:expr, $fXX:ident, $FXXExt:ident) => {
        ::libm::$FXXExt::$op($e)
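The pair of call_math definitions above lets the same call site compile against either the inherent std float methods or libm's extension traits when std is unavailable. A minimal standalone sketch of the dispatch idea, with the no_std arm and the $FXXExt parameter dropped so the snippet runs without extra dependencies (again my own illustration, not the crate's code):

    // With std available, fully-qualified inherent methods do the work.
    macro_rules! call_math {
        ($op:ident, $e:expr, $fXX:ident) => {
            $fXX::$op($e)
        };
    }

    fn main() {
        let x: f32 = 2.0;
        // Expands to f32::sqrt(x); a no_std build would route this through libm instead.
        let y = call_math!(sqrt, x, f32);
        println!("sqrt(2) = {}", y);
    }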
2 test.sh

@ -16,4 +16,6 @@ cd $(dirname $0)

time cargo test --all ${EXTRA_ARGS}

time cargo test --all --features threadsafe

cd -

@ -18,7 +18,6 @@ fn spec_to_runtime_value(val: Value<u32, u64>) -> RuntimeValue {
        Value::I64(v) => RuntimeValue::I64(v),
        Value::F32(v) => RuntimeValue::F32(v.into()),
        Value::F64(v) => RuntimeValue::F64(v.into()),
        Value::V128(_) => panic!("v128 is not supported"),
    }
}

@ -1,6 +1,6 @@
[package]
name = "wasmi-validation"
version = "0.2.0"
version = "0.1.0"
authors = ["Parity Technologies <admin@parity.io>"]
edition = "2018"
license = "MIT/Apache-2.0"

@ -8,7 +8,7 @@ repository = "https://github.com/paritytech/wasmi"
description = "Wasm code validator"

[dependencies]
parity-wasm = { version = "0.40.1", default-features = false }
parity-wasm = { version = "0.31", default-features = false }

[dev-dependencies]
assert_matches = "1.1"

@ -1,5 +1,6 @@
use crate::Error;
use alloc::vec::Vec;
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use parity_wasm::elements::{
    BlockType, FunctionType, GlobalType, MemoryType, TableType, ValueType,
};

@ -1,3 +1,6 @@
#[allow(unused_imports)]
use alloc::prelude::v1::*;

use crate::{
    context::ModuleContext, stack::StackWithLimit, util::Locals, Error, FuncValidator,
    DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX,

@ -266,8 +269,8 @@ impl<'a> FunctionValidationContext<'a> {
            BrIf(depth) => {
                self.validate_br_if(depth)?;
            }
            BrTable(ref br_table_data) => {
                self.validate_br_table(&*br_table_data.table, br_table_data.default)?;
            BrTable(ref table, default) => {
                self.validate_br_table(table, default)?;
                make_top_frame_polymorphic(&mut self.value_stack, &mut self.frame_stack);
            }
            Return => {

@ -2,6 +2,8 @@
// #![warn(missing_docs)]

#![cfg_attr(not(feature = "std"), no_std)]
//// alloc is required in no_std
#![cfg_attr(not(feature = "std"), feature(alloc, alloc_prelude))]

#[cfg(not(feature = "std"))]
#[macro_use]

@ -19,7 +21,8 @@ pub const DEFAULT_TABLE_INDEX: u32 = 0;
/// Maximal number of pages that a wasm instance supports.
pub const LINEAR_MEMORY_MAX_PAGES: u32 = 65536;

use alloc::{string::String, vec::Vec};
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use core::fmt;
#[cfg(feature = "std")]
use std::error;

@ -319,11 +322,7 @@ pub fn validate_module<V: Validator>(module: &Module) -> Result<V::Output, Error
    if let Some(data_section) = module.data_section() {
        for data_segment in data_section.entries() {
            context.require_memory(data_segment.index())?;
            let offset = data_segment
                .offset()
                .as_ref()
                .ok_or_else(|| Error("passive memory segments are not supported".into()))?;
            let init_ty = expr_const_type(&offset, context.globals())?;
            let init_ty = expr_const_type(data_segment.offset(), context.globals())?;
            if init_ty != ValueType::I32 {
                return Err(Error("segment offset should return I32".into()));
            }

@ -334,11 +333,8 @@ pub fn validate_module<V: Validator>(module: &Module) -> Result<V::Output, Error
    if let Some(element_section) = module.elements_section() {
        for element_segment in element_section.entries() {
            context.require_table(element_segment.index())?;
            let offset = element_segment
                .offset()
                .as_ref()
                .ok_or_else(|| Error("passive element segments are not supported".into()))?;
            let init_ty = expr_const_type(&offset, context.globals())?;

            let init_ty = expr_const_type(element_segment.offset(), context.globals())?;
            if init_ty != ValueType::I32 {
                return Err(Error("segment offset should return I32".into()));
            }

@ -1,4 +1,5 @@
use alloc::{string::String, vec::Vec};
#[allow(unused_imports)]
use alloc::prelude::v1::*;

use core::fmt;
#[cfg(feature = "std")]

@ -1,5 +1,6 @@
use crate::Error;
use alloc::string::String;
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use parity_wasm::elements::{Local, ValueType};

#[cfg(test)]