diff --git a/.travis.yml b/.travis.yml index 03c14d8..68a5cda 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,6 +3,9 @@ sudo: required language: - rust - cpp +rust: + - nightly + - stable addons: apt: sources: @@ -12,18 +15,16 @@ addons: - g++-6 - cmake env: -- NIGHTLY_TOOLCHAIN=nightly-2018-02-05 +- CC=/usr/bin/gcc-6 CXX=/usr/bin/g++-6 install: # Install `cargo-deadlinks` unless it is currently installed. - command -v cargo-deadlinks &> /dev/null || cargo install cargo-deadlinks -# Install nightly toolchain. -- rustup toolchain install $NIGHTLY_TOOLCHAIN +- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then rustup target add wasm32-unknown-unknown; fi script: -- export CC=/usr/bin/gcc-6 -- export CXX=/usr/bin/g++-6 -# Make sure fuzz targets are not broken. -- rustup run $NIGHTLY_TOOLCHAIN cargo check --tests --manifest-path=fuzz/Cargo.toml +# Make sure nightly targets are not broken. +- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo check --tests --manifest-path=fuzz/Cargo.toml; fi +- if [ "$TRAVIS_RUST_VERSION" == "nightly" ]; then cargo check --benches --manifest-path=benches/Cargo.toml; fi - ./test.sh - ./doc.sh after_success: | diff --git a/Cargo.toml b/Cargo.toml index c6002ad..47a6e74 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "wasmi" -version = "0.1.2" +version = "0.2.0" authors = ["Nikolay Volf ", "Svyatoslav Nikolsky ", "Sergey Pepyakin "] license = "MIT/Apache-2.0" readme = "README.md" @@ -8,19 +8,15 @@ repository = "https://github.com/paritytech/wasmi" documentation = "https://paritytech.github.io/wasmi/" description = "WebAssembly interpreter" keywords = ["wasm", "webassembly", "bytecode", "interpreter"] -exclude = [ "/res/*", "/tests/*", "/fuzz/*" ] +exclude = [ "/res/*", "/tests/*", "/fuzz/*", "/benches/*" ] [dependencies] # parity-wasm = "0.27" parity-wasm = { git = "https://github.com/paritytech/parity-wasm.git", rev = "0a61083238d8d9d8d9f6451a5d0da17674b11c21" } byteorder = "1.0" memory_units = "0.3.0" 
+nan-preserving-float = "0.1.0" [dev-dependencies] -wabt = "~0.2.2" - -[features] -# 32-bit platforms are not supported and not tested. Use this flag if you really want to use -# wasmi on these platforms. -# See https://github.com/pepyakin/wasmi/issues/43 -opt-in-32bit = [] +assert_matches = "1.1" +wabt = "0.3" diff --git a/benches/.gitignore b/benches/.gitignore new file mode 100644 index 0000000..ffc9328 --- /dev/null +++ b/benches/.gitignore @@ -0,0 +1,3 @@ +/target +*.trace + diff --git a/benches/Cargo.toml b/benches/Cargo.toml new file mode 100644 index 0000000..76abeda --- /dev/null +++ b/benches/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "benches" +version = "0.1.0" +authors = ["Sergey Pepyakin "] + +[dependencies] +wasmi = { path = ".." } +assert_matches = "1.2" + +[profile.bench] +debug = true diff --git a/benches/build.rs b/benches/build.rs new file mode 100644 index 0000000..9cb9ac1 --- /dev/null +++ b/benches/build.rs @@ -0,0 +1,32 @@ +use std::env; +use std::process; + + +fn main() { + println!("cargo:rerun-if-changed=./wasm-kernel/"); + + // The CARGO environment variable provides a path to the executable that + // runs this build process. + let cargo_bin = env::var("CARGO").expect("CARGO env variable should be defined"); + + // Build a release version of wasm-kernel. The code in the output wasm binary + // will be used in benchmarks. 
+ let output = process::Command::new(cargo_bin) + .arg("build") + .arg("--target=wasm32-unknown-unknown") + .arg("--release") + .arg("--manifest-path=./wasm-kernel/Cargo.toml") + .arg("--verbose") + .output() + .expect("failed to execute `cargo`"); + + if !output.status.success() { + let msg = format!( + "status: {status}\nstdout: {stdout}\nstderr: {stderr}\n", + status=output.status, + stdout=String::from_utf8_lossy(&output.stdout), + stderr=String::from_utf8_lossy(&output.stderr), + ); + panic!("{}", msg); + } +} diff --git a/benches/src/lib.rs b/benches/src/lib.rs new file mode 100644 index 0000000..db01ccd --- /dev/null +++ b/benches/src/lib.rs @@ -0,0 +1,43 @@ +#![feature(test)] + +extern crate test; +extern crate wasmi; +#[macro_use] +extern crate assert_matches; + +use std::error; +use std::fs::File; +use wasmi::{ImportsBuilder, Module, ModuleInstance, NopExternals, RuntimeValue}; + +use test::Bencher; + +// Load a module from a file. +fn load_from_file(filename: &str) -> Result> { + use std::io::prelude::*; + let mut file = File::open(filename)?; + let mut buf = Vec::new(); + file.read_to_end(&mut buf)?; + Ok(Module::from_buffer(buf)?) +} + +#[bench] +fn bench_tiny_keccak(b: &mut Bencher) { + let wasm_kernel = load_from_file( + "./wasm-kernel/target/wasm32-unknown-unknown/release/wasm_kernel.wasm", + ).expect("failed to load wasm_kernel. 
Is `build.rs` broken?"); + + let instance = ModuleInstance::new(&wasm_kernel, &ImportsBuilder::default()) + .expect("failed to instantiate wasm module") + .assert_no_start(); + + let test_data_ptr = assert_matches!( + instance.invoke_export("prepare_tiny_keccak", &[], &mut NopExternals), + Ok(Some(v @ RuntimeValue::I32(_))) => v + ); + + b.iter(|| { + instance + .invoke_export("bench_tiny_keccak", &[test_data_ptr], &mut NopExternals) + .unwrap(); + }); +} diff --git a/benches/wasm-kernel/.gitignore b/benches/wasm-kernel/.gitignore new file mode 100644 index 0000000..ea8c4bf --- /dev/null +++ b/benches/wasm-kernel/.gitignore @@ -0,0 +1 @@ +/target diff --git a/benches/wasm-kernel/Cargo.toml b/benches/wasm-kernel/Cargo.toml new file mode 100644 index 0000000..f9717f5 --- /dev/null +++ b/benches/wasm-kernel/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "wasm-kernel" +version = "0.1.0" +authors = ["Sergey Pepyakin "] + +[lib] +crate-type = ["cdylib"] + +[dependencies] +tiny-keccak = "1.4.2" +rlibc = "1.0" + +[profile.release] +panic = "abort" +lto = true +opt-level = "z" diff --git a/benches/wasm-kernel/src/lib.rs b/benches/wasm-kernel/src/lib.rs new file mode 100644 index 0000000..ee172ff --- /dev/null +++ b/benches/wasm-kernel/src/lib.rs @@ -0,0 +1,50 @@ +#![no_std] +#![feature(lang_items)] +#![feature(core_intrinsics)] +#![feature(panic_implementation)] + +extern crate rlibc; +extern crate tiny_keccak; + +use tiny_keccak::Keccak; + +#[no_mangle] +#[panic_implementation] +pub fn panic_fmt(_info: &::core::panic::PanicInfo) -> ! 
{ + use core::intrinsics; + unsafe { + intrinsics::abort(); + } +} + +pub struct TinyKeccakTestData { + data: &'static [u8], + result: &'static mut [u8], +} + +#[no_mangle] +pub extern "C" fn prepare_tiny_keccak() -> *const TinyKeccakTestData { + static DATA: [u8; 4096] = [254u8; 4096]; + static mut RESULT: [u8; 32] = [0u8; 32]; + + static mut TEST_DATA: Option = None; + + unsafe { + if let None = TEST_DATA { + TEST_DATA = Some(TinyKeccakTestData { + data: &DATA, + result: &mut RESULT, + }); + } + TEST_DATA.as_ref().unwrap() as *const TinyKeccakTestData + } +} + +#[no_mangle] +pub extern "C" fn bench_tiny_keccak(test_data: *const TinyKeccakTestData) { + unsafe { + let mut keccak = Keccak::new_keccak256(); + keccak.update((*test_data).data); + keccak.finalize((*test_data).result); + } +} diff --git a/doc.sh b/doc.sh index 5148060..652db79 100755 --- a/doc.sh +++ b/doc.sh @@ -4,11 +4,7 @@ set -eux cd $(dirname $0) -if [ -s NIGHTLY_TOOLCHAIN ]; then - rustup run $NIGHTLY_TOOLCHAIN cargo doc -else - cargo doc -fi; +cargo doc # cargo-deadlinks will check any links in docs generated by `cargo doc`. # This is useful as rustdoc uses raw links which are error prone. 
diff --git a/examples/invoke.rs b/examples/invoke.rs index f037287..2b7dd99 100644 --- a/examples/invoke.rs +++ b/examples/invoke.rs @@ -64,8 +64,8 @@ fn main() { function_type.params().iter().enumerate().map(|(i, value)| match value { &ValueType::I32 => RuntimeValue::I32(program_args[i].parse::().expect(&format!("Can't parse arg #{} as i32", program_args[i]))), &ValueType::I64 => RuntimeValue::I64(program_args[i].parse::().expect(&format!("Can't parse arg #{} as i64", program_args[i]))), - &ValueType::F32 => RuntimeValue::F32(program_args[i].parse::().expect(&format!("Can't parse arg #{} as f32", program_args[i]))), - &ValueType::F64 => RuntimeValue::F64(program_args[i].parse::().expect(&format!("Can't parse arg #{} as f64", program_args[i]))), + &ValueType::F32 => RuntimeValue::F32(program_args[i].parse::().expect(&format!("Can't parse arg #{} as f32", program_args[i])).into()), + &ValueType::F64 => RuntimeValue::F64(program_args[i].parse::().expect(&format!("Can't parse arg #{} as f64", program_args[i])).into()), }).collect::>() }; diff --git a/src/common/mod.rs b/src/common/mod.rs index 0801984..49ff10c 100644 --- a/src/common/mod.rs +++ b/src/common/mod.rs @@ -1,4 +1,3 @@ -use parity_wasm::elements::BlockType; pub mod stack; @@ -7,38 +6,4 @@ pub const DEFAULT_MEMORY_INDEX: u32 = 0; /// Index of default table. pub const DEFAULT_TABLE_INDEX: u32 = 0; -/// Control stack frame. -#[derive(Debug, Clone)] -pub struct BlockFrame { - /// Frame type. - pub frame_type: BlockFrameType, - /// A signature, which is a block signature type indicating the number and types of result values of the region. - pub block_type: BlockType, - /// A label for reference to block instruction. - pub begin_position: usize, - /// A label for reference from branch instructions. - pub branch_position: usize, - /// A label for reference from end instructions. 
- pub end_position: usize, - /// A limit integer value, which is an index into the value stack indicating where to reset it to on a branch to that label. - pub value_stack_len: usize, - /// Boolean which signals whether value stack became polymorphic. Value stack starts in non-polymorphic state and - /// becomes polymorphic only after an instruction that never passes control further is executed, - /// i.e. `unreachable`, `br` (but not `br_if`!), etc. - pub polymorphic_stack: bool, -} - -/// Type of block frame. -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum BlockFrameType { - /// Function frame. - Function, - /// Usual block frame. - Block, - /// Loop frame (branching to the beginning of block). - Loop, - /// True-subblock of if expression. - IfTrue, - /// False-subblock of if expression. - IfFalse, -} +// TODO: Move BlockFrame under validation. diff --git a/src/common/stack.rs b/src/common/stack.rs index fc1e289..dea4b9c 100644 --- a/src/common/stack.rs +++ b/src/common/stack.rs @@ -1,5 +1,4 @@ -use std::collections::VecDeque; use std::error; use std::fmt; @@ -22,7 +21,7 @@ impl error::Error for Error { #[derive(Debug)] pub struct StackWithLimit where T: Clone { /// Stack values. - values: VecDeque, + values: Vec, /// Stack limit (maximal stack len). 
limit: usize, } @@ -30,7 +29,7 @@ pub struct StackWithLimit where T: Clone { impl StackWithLimit where T: Clone { pub fn with_limit(limit: usize) -> Self { StackWithLimit { - values: VecDeque::new(), + values: Vec::new(), limit: limit } } @@ -43,19 +42,17 @@ impl StackWithLimit where T: Clone { self.values.len() } - pub fn limit(&self) -> usize { - self.limit - } - pub fn top(&self) -> Result<&T, Error> { + let len = self.values.len(); self.values - .back() + .get(len - 1) .ok_or_else(|| Error("non-empty stack expected".into())) } pub fn top_mut(&mut self) -> Result<&mut T, Error> { + let len = self.values.len(); self.values - .back_mut() + .get_mut(len - 1) .ok_or_else(|| Error("non-empty stack expected".into())) } @@ -72,13 +69,13 @@ impl StackWithLimit where T: Clone { return Err(Error(format!("exceeded stack limit {}", self.limit))); } - self.values.push_back(value); + self.values.push(value); Ok(()) } pub fn pop(&mut self) -> Result { self.values - .pop_back() + .pop() .ok_or_else(|| Error("non-empty stack expected".into())) } diff --git a/src/func.rs b/src/func.rs index b72107d..beeacea 100644 --- a/src/func.rs +++ b/src/func.rs @@ -1,12 +1,12 @@ use std::rc::{Rc, Weak}; use std::fmt; -use std::collections::HashMap; -use parity_wasm::elements::{Local, Opcodes}; +use parity_wasm::elements::Local; use {Trap, TrapKind, Signature}; use host::Externals; use runner::{check_function_args, Interpreter}; use value::RuntimeValue; use module::ModuleInstance; +use isa; /// Reference to a function (See [`FuncInstance`] for details). /// @@ -158,6 +158,5 @@ impl FuncInstance { #[derive(Clone, Debug)] pub struct FuncBody { pub locals: Vec, - pub opcodes: Opcodes, - pub labels: HashMap, + pub code: isa::Instructions, } diff --git a/src/isa.rs b/src/isa.rs new file mode 100644 index 0000000..84c8b37 --- /dev/null +++ b/src/isa.rs @@ -0,0 +1,255 @@ +//! An instruction set used by wasmi. +//! +//! The instruction set is mostly derived from Wasm. However, +//! 
there is a substantial difference. +//! +//! # Structured Stack Machine vs Traditional One +//! +//! Wasm is a structured stack machine. Wasm encodes control flow in structures +//! similar to that commonly found in a programming languages +//! such as if, while. That contrasts to a traditional stack machine which +//! encodes all control flow with goto-like instructions. +//! +//! Structured stack machine code aligns well with goals of Wasm, +//! namely providing fast validation of Wasm code and compilation to native code. +//! +//! Unfortunately, the downside of structured stack machine code is +//! that it is less convenient to interpret. For example, let's look at +//! the following example in hypothetical structured stack machine: +//! +//! ```plain +//! loop +//! ... +//! if_true_jump_to_end +//! ... +//! end +//! ``` +//! +//! To execute `if_true_jump_to_end` , the interpreter needs to skip all instructions +//! until it reaches the *matching* `end`. That's quite inefficient compared +//! to a plain goto to the specific position. +//! +//! # Differences from Wasm +//! +//! - There is no `nop` instruction. +//! - All control flow strucutres are flattened to plain gotos. +//! - Implicit returns via reaching function scope `End` are replaced with an explicit `return` instruction. +//! - Locals live on the value stack now. +//! - Load/store instructions doesn't take `align` parameter. +//! - *.const store value in straight encoding. +//! - Reserved immediates are ignored for `call_indirect`, `current_memory`, `grow_memory`. +//! + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Target { + pub dst_pc: u32, + pub drop: u32, + pub keep: u8, +} + +#[allow(unused)] // TODO: Remove +#[derive(Debug, Clone, PartialEq)] +pub enum Instruction { + /// Push a local variable or an argument from the specified depth. + GetLocal(u32), + + /// Pop a value and put it in at the specified depth. + SetLocal(u32), + + /// Copy a value to the specified depth. 
+ TeeLocal(u32), + + /// Similar to the Wasm ones, but instead of a label depth + /// they specify direct PC. + Br(Target), + BrIfEqz(Target), + BrIfNez(Target), + + /// Last one is the default. + /// + /// Can be less than zero. + BrTable(Box<[Target]>), + + Unreachable, + Return { + drop: u32, + keep: u8, + }, + + Call(u32), + CallIndirect(u32), + + Drop, + Select, + + GetGlobal(u32), + SetGlobal(u32), + + I32Load(u32), + I64Load(u32), + F32Load(u32), + F64Load(u32), + I32Load8S(u32), + I32Load8U(u32), + I32Load16S(u32), + I32Load16U(u32), + I64Load8S(u32), + I64Load8U(u32), + I64Load16S(u32), + I64Load16U(u32), + I64Load32S(u32), + I64Load32U(u32), + I32Store(u32), + I64Store(u32), + F32Store(u32), + F64Store(u32), + I32Store8(u32), + I32Store16(u32), + I64Store8(u32), + I64Store16(u32), + I64Store32(u32), + + CurrentMemory, + GrowMemory, + + I32Const(i32), + I64Const(i64), + F32Const(u32), + F64Const(u64), + + I32Eqz, + I32Eq, + I32Ne, + I32LtS, + I32LtU, + I32GtS, + I32GtU, + I32LeS, + I32LeU, + I32GeS, + I32GeU, + + I64Eqz, + I64Eq, + I64Ne, + I64LtS, + I64LtU, + I64GtS, + I64GtU, + I64LeS, + I64LeU, + I64GeS, + I64GeU, + + F32Eq, + F32Ne, + F32Lt, + F32Gt, + F32Le, + F32Ge, + + F64Eq, + F64Ne, + F64Lt, + F64Gt, + F64Le, + F64Ge, + + I32Clz, + I32Ctz, + I32Popcnt, + I32Add, + I32Sub, + I32Mul, + I32DivS, + I32DivU, + I32RemS, + I32RemU, + I32And, + I32Or, + I32Xor, + I32Shl, + I32ShrS, + I32ShrU, + I32Rotl, + I32Rotr, + + I64Clz, + I64Ctz, + I64Popcnt, + I64Add, + I64Sub, + I64Mul, + I64DivS, + I64DivU, + I64RemS, + I64RemU, + I64And, + I64Or, + I64Xor, + I64Shl, + I64ShrS, + I64ShrU, + I64Rotl, + I64Rotr, + F32Abs, + F32Neg, + F32Ceil, + F32Floor, + F32Trunc, + F32Nearest, + F32Sqrt, + F32Add, + F32Sub, + F32Mul, + F32Div, + F32Min, + F32Max, + F32Copysign, + F64Abs, + F64Neg, + F64Ceil, + F64Floor, + F64Trunc, + F64Nearest, + F64Sqrt, + F64Add, + F64Sub, + F64Mul, + F64Div, + F64Min, + F64Max, + F64Copysign, + + I32WrapI64, + I32TruncSF32, + I32TruncUF32, + 
I32TruncSF64, + I32TruncUF64, + I64ExtendSI32, + I64ExtendUI32, + I64TruncSF32, + I64TruncUF32, + I64TruncSF64, + I64TruncUF64, + F32ConvertSI32, + F32ConvertUI32, + F32ConvertSI64, + F32ConvertUI64, + F32DemoteF64, + F64ConvertSI32, + F64ConvertUI32, + F64ConvertSI64, + F64ConvertUI64, + F64PromoteF32, + + I32ReinterpretF32, + I64ReinterpretF64, + F32ReinterpretI32, + F64ReinterpretI64, +} + +#[derive(Debug, Clone)] +pub struct Instructions { + pub code: Vec, +} diff --git a/src/lib.rs b/src/lib.rs index 618ae75..c563c75 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -98,20 +98,17 @@ #[cfg(test)] extern crate wabt; +#[cfg(test)] +#[macro_use] +extern crate assert_matches; + extern crate parity_wasm; extern crate byteorder; extern crate memory_units as memory_units_crate; - -#[cfg(all(not(feature = "opt-in-32bit"), target_pointer_width = "32"))] -compile_error! { -"32-bit targets are not supported at the moment. -You can use 'opt-in-32bit' feature. -See https://github.com/pepyakin/wasmi/issues/43" -} +extern crate nan_preserving_float; use std::fmt; use std::error; -use std::collections::HashMap; /// Error type which can thrown by wasm code or by host environment. /// @@ -358,6 +355,7 @@ mod imports; mod global; mod func; mod types; +mod isa; #[cfg(test)] mod tests; @@ -380,12 +378,11 @@ pub mod memory_units { /// Deserialized module prepared for instantiation. pub struct Module { - labels: HashMap>, + code_map: Vec, module: parity_wasm::elements::Module, } impl Module { - /// Create `Module` from `parity_wasm::elements::Module`. /// /// This function will load, validate and prepare a `parity_wasm`'s `Module`. 
@@ -421,16 +418,76 @@ impl Module { pub fn from_parity_wasm_module(module: parity_wasm::elements::Module) -> Result { use validation::{validate_module, ValidatedModule}; let ValidatedModule { - labels, + code_map, module, } = validate_module(module)?; Ok(Module { - labels, + code_map, module, }) } + /// Fail if the module contains any floating-point operations + /// + /// # Errors + /// + /// Returns `Err` if provided `Module` is not valid. + /// + /// # Examples + /// + /// ```rust + /// # extern crate wasmi; + /// # extern crate wabt; + /// + /// let wasm_binary: Vec = + /// wabt::wat2wasm( + /// r#" + /// (module + /// (func $add (param $lhs i32) (param $rhs i32) (result i32) + /// get_local $lhs + /// get_local $rhs + /// i32.add)) + /// "#, + /// ) + /// .expect("failed to parse wat"); + /// + /// // Load wasm binary and prepare it for instantiation. + /// let module = wasmi::Module::from_buffer(&wasm_binary).expect("Parsing failed"); + /// assert!(module.deny_floating_point().is_ok()); + /// + /// let wasm_binary: Vec = + /// wabt::wat2wasm( + /// r#" + /// (module + /// (func $add (param $lhs f32) (param $rhs f32) (result f32) + /// get_local $lhs + /// get_local $rhs + /// f32.add)) + /// "#, + /// ) + /// .expect("failed to parse wat"); + /// + /// let module = wasmi::Module::from_buffer(&wasm_binary).expect("Parsing failed"); + /// assert!(module.deny_floating_point().is_err()); + /// + /// let wasm_binary: Vec = + /// wabt::wat2wasm( + /// r#" + /// (module + /// (func $add (param $lhs f32) (param $rhs f32) (result f32) + /// get_local $lhs)) + /// "#, + /// ) + /// .expect("failed to parse wat"); + /// + /// let module = wasmi::Module::from_buffer(&wasm_binary).expect("Parsing failed"); + /// assert!(module.deny_floating_point().is_err()); + /// ``` + pub fn deny_floating_point(&self) -> Result<(), Error> { + validation::deny_floating_point(&self.module).map_err(Into::into) + } + /// Create `Module` from a given buffer. 
/// /// This function will deserialize wasm module from a given module, @@ -467,7 +524,7 @@ impl Module { &self.module } - pub(crate) fn labels(&self) -> &HashMap> { - &self.labels + pub(crate) fn code(&self) -> &Vec { + &self.code_map } } diff --git a/src/memory.rs b/src/memory.rs index 1f269f1..339a445 100644 --- a/src/memory.rs +++ b/src/memory.rs @@ -48,7 +48,7 @@ impl ::std::ops::Deref for MemoryRef { /// /// [`LINEAR_MEMORY_PAGE_SIZE`]: constant.LINEAR_MEMORY_PAGE_SIZE.html pub struct MemoryInstance { - /// Memofy limits. + /// Memory limits. limits: ResizableLimits, /// Linear memory buffer. buffer: RefCell>, @@ -315,7 +315,7 @@ impl MemoryInstance { Ok(()) } - /// Fill memory region with a specified value. + /// Fill the memory region with the specified value. /// /// Semantically equivalent to `memset`. /// @@ -330,7 +330,7 @@ impl MemoryInstance { Ok(()) } - /// Fill specified memory region with zeroes. + /// Fill the specified memory region with zeroes. /// /// # Errors /// @@ -338,6 +338,35 @@ impl MemoryInstance { pub fn zero(&self, offset: usize, len: usize) -> Result<(), Error> { self.clear(offset, 0, len) } + + /// Provides direct access to the underlying memory buffer. + /// + /// # Panics + /// + /// Any call that requires write access to memory (such as [`set`], [`clear`], etc) made within + /// the closure will panic. Proceed with caution. + /// + /// [`set`]: #method.get + /// [`clear`]: #method.set + pub fn with_direct_access R>(&self, f: F) -> R { + let buf = self.buffer.borrow(); + f(&*buf) + } + + /// Provides direct mutable access to the underlying memory buffer. + /// + /// # Panics + /// + /// Any calls that requires either read or write access to memory (such as [`get`], [`set`], [`copy`], etc) made + /// within the closure will panic. Proceed with caution. 
+ /// + /// [`get`]: #method.get + /// [`set`]: #method.set + /// [`copy`]: #method.copy + pub fn with_direct_access_mut R>(&self, f: F) -> R { + let mut buf = self.buffer.borrow_mut(); + f(&mut *buf) + } } pub fn validate_memory(initial: Pages, maximum: Option) -> Result<(), String> { @@ -369,6 +398,7 @@ mod tests { #[test] fn alloc() { + #[cfg(target_pointer_width = "64")] let fixtures = &[ (0, None, true), (0, Some(0), true), @@ -381,6 +411,17 @@ mod tests { (65536, Some(0), false), (65536, None, true), ]; + + #[cfg(target_pointer_width = "32")] + let fixtures = &[ + (0, None, true), + (0, Some(0), true), + (1, None, true), + (1, Some(1), true), + (0, Some(1), true), + (1, Some(0), false), + ]; + for (index, &(initial, maybe_max, expected_ok)) in fixtures.iter().enumerate() { let initial: Pages = Pages(initial); let maximum: Option = maybe_max.map(|m| Pages(m)); @@ -472,4 +513,27 @@ mod tests { assert_eq!(data, [17, 129]); } + + #[test] + fn zero_copy() { + let mem = MemoryInstance::alloc(Pages(1), None).unwrap(); + mem.with_direct_access_mut(|buf| { + assert_eq!(buf.len(), 65536); + buf[..10].copy_from_slice(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); + }); + mem.with_direct_access(|buf| { + assert_eq!(buf.len(), 65536); + assert_eq!(&buf[..10], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); + }); + } + + #[should_panic] + #[test] + fn zero_copy_panics_on_nested_access() { + let mem = MemoryInstance::alloc(Pages(1), None).unwrap(); + let mem_inner = mem.clone(); + mem.with_direct_access(move |_| { + let _ = mem_inner.set(0, &[11, 12, 13]); + }); + } } diff --git a/src/module.rs b/src/module.rs index ba94bba..a3a9fa0 100644 --- a/src/module.rs +++ b/src/module.rs @@ -291,7 +291,7 @@ impl ModuleInstance { } } - let labels = loaded_module.labels(); + let code = loaded_module.code(); { let funcs = module.function_section().map(|fs| fs.entries()).unwrap_or( &[], @@ -308,13 +308,12 @@ impl ModuleInstance { let signature = instance.signature_by_index(ty.type_ref()).expect( "Due to 
validation type should exists", ); - let labels = labels.get(&index).expect( + let code = code.get(index).expect( "At func validation time labels are collected; Collected labels are added by index; qed", ).clone(); let func_body = FuncBody { locals: body.locals().to_vec(), - opcodes: body.code().clone(), - labels: labels, + code: code, }; let func_instance = FuncInstance::alloc_internal(Rc::downgrade(&instance.0), signature, func_body); @@ -420,7 +419,7 @@ impl ModuleInstance { // This check is not only for bailing out early, but also to check the case when // segment consist of 0 members. - if offset_val as usize + element_segment.members().len() > table_inst.current_size() as usize { + if offset_val as u64 + element_segment.members().len() as u64 > table_inst.current_size() as u64 { return Err( Error::Instantiation("elements segment does not fit".to_string()) ); diff --git a/src/runner.rs b/src/runner.rs index 15ec910..00b3736 100644 --- a/src/runner.rs +++ b/src/runner.rs @@ -3,114 +3,145 @@ use std::ops; use std::{u32, usize}; use std::fmt; use std::iter::repeat; -use std::collections::{HashMap, VecDeque}; -use parity_wasm::elements::{Opcode, BlockType, Local}; +use parity_wasm::elements::Local; use {Error, Trap, TrapKind, Signature}; use module::ModuleRef; +use memory::MemoryRef; use func::{FuncRef, FuncInstance, FuncInstanceInternal}; use value::{ RuntimeValue, FromRuntimeValue, WrapInto, TryTruncateInto, ExtendInto, ArithmeticOps, Integer, Float, LittleEndianConvert, TransmuteInto, }; use host::Externals; -use common::{DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX, BlockFrame, BlockFrameType}; -use common::stack::StackWithLimit; +use common::{DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX}; use memory_units::Pages; +use nan_preserving_float::{F32, F64}; +use isa; /// Maximum number of entries in value stack. -pub const DEFAULT_VALUE_STACK_LIMIT: usize = 16384; -/// Maximum number of entries in frame stack. 
-pub const DEFAULT_FRAME_STACK_LIMIT: usize = 16384; +pub const DEFAULT_VALUE_STACK_LIMIT: usize = (512 * 1024) / ::std::mem::size_of::(); -/// Function interpreter. -pub struct Interpreter<'a, E: Externals + 'a> { - externals: &'a mut E, -} +// TODO: Make these parameters changeble. +pub const DEFAULT_CALL_STACK_LIMIT: usize = 16 * 1024; /// Interpreter action to execute after executing instruction. pub enum InstructionOutcome { /// Continue with next instruction. RunNextInstruction, - /// Branch to given frame. - Branch(usize), + /// Branch to an instruction at the given position. + Branch(isa::Target), /// Execute function call. ExecuteCall(FuncRef), - /// End current frame. - End, /// Return from current function block. - Return, + Return(u32, u8), } /// Function run result. enum RunResult { - /// Function has returned (optional) value. - Return(Option), + /// Function has returned. + Return, /// Function is calling other function. NestedCall(FuncRef), } +/// Function interpreter. +pub struct Interpreter<'a, E: Externals + 'a> { + externals: &'a mut E, + value_stack: ValueStack, +} + impl<'a, E: Externals> Interpreter<'a, E> { pub fn new(externals: &'a mut E) -> Interpreter<'a, E> { + let value_stack = ValueStack::with_limit(DEFAULT_VALUE_STACK_LIMIT); Interpreter { externals, + value_stack, } } pub fn start_execution(&mut self, func: &FuncRef, args: &[RuntimeValue]) -> Result, Trap> { - let context = FunctionContext::new( - func.clone(), - DEFAULT_VALUE_STACK_LIMIT, - DEFAULT_FRAME_STACK_LIMIT, - func.signature(), - args.into_iter().cloned().collect(), - ); + for arg in args { + self.value_stack + .push(*arg) + .map_err( + // There is not enough space for pushing initial arguments. + // Weird, but bail out anyway. 
+ |_| Trap::from(TrapKind::StackOverflow) + )?; + } - let mut function_stack = VecDeque::new(); - function_stack.push_back(context); + let initial_frame = FunctionContext::new(func.clone()); - self.run_interpreter_loop(&mut function_stack) + let mut call_stack = Vec::new(); + call_stack.push(initial_frame); + + self.run_interpreter_loop(&mut call_stack)?; + + Ok(func.signature().return_type().map(|_vt| { + let return_value = self.value_stack + .pop(); + + return_value + })) } - fn run_interpreter_loop(&mut self, function_stack: &mut VecDeque) -> Result, Trap> { + fn run_interpreter_loop(&mut self, call_stack: &mut Vec) -> Result<(), Trap> { loop { - let mut function_context = function_stack.pop_back().expect("on loop entry - not empty; on loop continue - checking for emptiness; qed"); + let mut function_context = call_stack + .pop() + .expect("on loop entry - not empty; on loop continue - checking for emptiness; qed"); let function_ref = function_context.function.clone(); let function_body = function_ref .body() .expect( "Host functions checked in function_return below; Internal functions always have a body; qed" ); + if !function_context.is_initialized() { - let return_type = function_context.return_type; - function_context.initialize(&function_body.locals); - function_context.push_frame(&function_body.labels, BlockFrameType::Function, return_type).map_err(Trap::new)?; + // Initialize stack frame for the function call. 
+ function_context.initialize(&function_body.locals, &mut self.value_stack)?; } - let function_return = self.do_run_function(&mut function_context, function_body.opcodes.elements(), &function_body.labels).map_err(Trap::new)?; + let function_return = + self.do_run_function( + &mut function_context, + &function_body.code.code, + ).map_err(Trap::new)?; match function_return { - RunResult::Return(return_value) => { - match function_stack.back_mut() { - Some(caller_context) => if let Some(return_value) = return_value { - caller_context.value_stack_mut().push(return_value).map_err(Trap::new)?; - }, - None => return Ok(return_value), + RunResult::Return => { + if call_stack.last().is_none() { + // This was the last frame in the call stack. This means we + // are done executing. + return Ok(()); } }, RunResult::NestedCall(nested_func) => { + if call_stack.len() + 1 >= DEFAULT_CALL_STACK_LIMIT { + return Err(TrapKind::StackOverflow.into()); + } + match *nested_func.as_internal() { FuncInstanceInternal::Internal { .. } => { - let nested_context = function_context.nested(nested_func.clone()).map_err(Trap::new)?; - function_stack.push_back(function_context); - function_stack.push_back(nested_context); + let nested_context = FunctionContext::new(nested_func.clone()); + call_stack.push(function_context); + call_stack.push(nested_context); }, FuncInstanceInternal::Host { ref signature, .. } => { - let args = prepare_function_args(signature, &mut function_context.value_stack); + let args = prepare_function_args(signature, &mut self.value_stack); let return_val = FuncInstance::invoke(&nested_func, &args, self.externals)?; - if let Some(return_val) = return_val { - function_context.value_stack_mut().push(return_val).map_err(Trap::new)?; + + // Check if `return_val` matches the signature. 
+ let value_ty = return_val.clone().map(|val| val.value_type()); + let expected_ty = nested_func.signature().return_type(); + if value_ty != expected_ty { + return Err(TrapKind::UnexpectedSignature.into()); } - function_stack.push_back(function_context); + + if let Some(return_val) = return_val { + self.value_stack.push(return_val).map_err(Trap::new)?; + } + call_stack.push(function_context); } } }, @@ -118,239 +149,218 @@ impl<'a, E: Externals> Interpreter<'a, E> { } } - fn do_run_function(&mut self, function_context: &mut FunctionContext, function_body: &[Opcode], function_labels: &HashMap) -> Result { + fn do_run_function(&mut self, function_context: &mut FunctionContext, instructions: &[isa::Instruction]) -> Result { loop { - let instruction = &function_body[function_context.position]; + let instruction = &instructions[function_context.position]; - match self.run_instruction(function_context, function_labels, instruction)? { + match self.run_instruction(function_context, instruction)? 
{ InstructionOutcome::RunNextInstruction => function_context.position += 1, - InstructionOutcome::Branch(mut index) => { - // discard index - 1 blocks - while index >= 1 { - function_context.discard_frame(); - index -= 1; - } - - function_context.pop_frame(true)?; - if function_context.frame_stack().is_empty() { - break; - } + InstructionOutcome::Branch(target) => { + function_context.position = target.dst_pc as usize; + self.value_stack.drop_keep(target.drop, target.keep); }, InstructionOutcome::ExecuteCall(func_ref) => { function_context.position += 1; return Ok(RunResult::NestedCall(func_ref)); }, - InstructionOutcome::End => { - if function_context.frame_stack().is_empty() { - break; - } + InstructionOutcome::Return(drop, keep) => { + self.value_stack.drop_keep(drop, keep); + break; }, - InstructionOutcome::Return => break, } } - Ok(RunResult::Return(match function_context.return_type { - BlockType::Value(_) => { - let result = function_context - .value_stack_mut() - .pop(); - Some(result) - }, - BlockType::NoResult => None, - })) + Ok(RunResult::Return) } - fn run_instruction(&mut self, context: &mut FunctionContext, labels: &HashMap, opcode: &Opcode) -> Result { - match opcode { - &Opcode::Unreachable => self.run_unreachable(context), - &Opcode::Nop => self.run_nop(context), - &Opcode::Block(block_type) => self.run_block(context, labels, block_type), - &Opcode::Loop(block_type) => self.run_loop(context, labels, block_type), - &Opcode::If(block_type) => self.run_if(context, labels, block_type), - &Opcode::Else => self.run_else(context, labels), - &Opcode::End => self.run_end(context), - &Opcode::Br(idx) => self.run_br(context, idx), - &Opcode::BrIf(idx) => self.run_br_if(context, idx), - &Opcode::BrTable(ref table, default) => self.run_br_table(context, table, default), - &Opcode::Return => self.run_return(context), + #[inline(always)] + fn run_instruction(&mut self, context: &mut FunctionContext, instruction: &isa::Instruction) -> Result { + match instruction 
{ + &isa::Instruction::Unreachable => self.run_unreachable(context), - &Opcode::Call(index) => self.run_call(context, index), - &Opcode::CallIndirect(index, _reserved) => self.run_call_indirect(context, index), + &isa::Instruction::Br(ref target) => self.run_br(context, target.clone()), + &isa::Instruction::BrIfEqz(ref target) => self.run_br_eqz(target.clone()), + &isa::Instruction::BrIfNez(ref target) => self.run_br_nez(target.clone()), + &isa::Instruction::BrTable(ref targets) => self.run_br_table(targets), + &isa::Instruction::Return { drop, keep } => self.run_return(drop, keep), - &Opcode::Drop => self.run_drop(context), - &Opcode::Select => self.run_select(context), + &isa::Instruction::Call(index) => self.run_call(context, index), + &isa::Instruction::CallIndirect(index) => self.run_call_indirect(context, index), - &Opcode::GetLocal(index) => self.run_get_local(context, index), - &Opcode::SetLocal(index) => self.run_set_local(context, index), - &Opcode::TeeLocal(index) => self.run_tee_local(context, index), - &Opcode::GetGlobal(index) => self.run_get_global(context, index), - &Opcode::SetGlobal(index) => self.run_set_global(context, index), + &isa::Instruction::Drop => self.run_drop(), + &isa::Instruction::Select => self.run_select(), - &Opcode::I32Load(align, offset) => self.run_load::(context, align, offset), - &Opcode::I64Load(align, offset) => self.run_load::(context, align, offset), - &Opcode::F32Load(align, offset) => self.run_load::(context, align, offset), - &Opcode::F64Load(align, offset) => self.run_load::(context, align, offset), - &Opcode::I32Load8S(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I32Load8U(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I32Load16S(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I32Load16U(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I64Load8S(align, offset) => self.run_load_extend::(context, 
align, offset), - &Opcode::I64Load8U(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I64Load16S(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I64Load16U(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I64Load32S(align, offset) => self.run_load_extend::(context, align, offset), - &Opcode::I64Load32U(align, offset) => self.run_load_extend::(context, align, offset), + &isa::Instruction::GetLocal(depth) => self.run_get_local(depth), + &isa::Instruction::SetLocal(depth) => self.run_set_local(depth), + &isa::Instruction::TeeLocal(depth) => self.run_tee_local(depth), + &isa::Instruction::GetGlobal(index) => self.run_get_global(context, index), + &isa::Instruction::SetGlobal(index) => self.run_set_global(context, index), - &Opcode::I32Store(align, offset) => self.run_store::(context, align, offset), - &Opcode::I64Store(align, offset) => self.run_store::(context, align, offset), - &Opcode::F32Store(align, offset) => self.run_store::(context, align, offset), - &Opcode::F64Store(align, offset) => self.run_store::(context, align, offset), - &Opcode::I32Store8(align, offset) => self.run_store_wrap::(context, align, offset), - &Opcode::I32Store16(align, offset) => self.run_store_wrap::(context, align, offset), - &Opcode::I64Store8(align, offset) => self.run_store_wrap::(context, align, offset), - &Opcode::I64Store16(align, offset) => self.run_store_wrap::(context, align, offset), - &Opcode::I64Store32(align, offset) => self.run_store_wrap::(context, align, offset), + &isa::Instruction::I32Load(offset) => self.run_load::(context, offset), + &isa::Instruction::I64Load(offset) => self.run_load::(context, offset), + &isa::Instruction::F32Load(offset) => self.run_load::(context, offset), + &isa::Instruction::F64Load(offset) => self.run_load::(context, offset), + &isa::Instruction::I32Load8S(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I32Load8U(offset) => 
self.run_load_extend::(context, offset), + &isa::Instruction::I32Load16S(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I32Load16U(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I64Load8S(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I64Load8U(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I64Load16S(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I64Load16U(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I64Load32S(offset) => self.run_load_extend::(context, offset), + &isa::Instruction::I64Load32U(offset) => self.run_load_extend::(context, offset), - &Opcode::CurrentMemory(_) => self.run_current_memory(context), - &Opcode::GrowMemory(_) => self.run_grow_memory(context), + &isa::Instruction::I32Store(offset) => self.run_store::(context, offset), + &isa::Instruction::I64Store(offset) => self.run_store::(context, offset), + &isa::Instruction::F32Store(offset) => self.run_store::(context, offset), + &isa::Instruction::F64Store(offset) => self.run_store::(context, offset), + &isa::Instruction::I32Store8(offset) => self.run_store_wrap::(context, offset), + &isa::Instruction::I32Store16(offset) => self.run_store_wrap::(context, offset), + &isa::Instruction::I64Store8(offset) => self.run_store_wrap::(context, offset), + &isa::Instruction::I64Store16(offset) => self.run_store_wrap::(context, offset), + &isa::Instruction::I64Store32(offset) => self.run_store_wrap::(context, offset), - &Opcode::I32Const(val) => self.run_const(context, val.into()), - &Opcode::I64Const(val) => self.run_const(context, val.into()), - &Opcode::F32Const(val) => self.run_const(context, RuntimeValue::decode_f32(val)), - &Opcode::F64Const(val) => self.run_const(context, RuntimeValue::decode_f64(val)), + &isa::Instruction::CurrentMemory => self.run_current_memory(context), + &isa::Instruction::GrowMemory => self.run_grow_memory(context), - 
&Opcode::I32Eqz => self.run_eqz::(context), - &Opcode::I32Eq => self.run_eq::(context), - &Opcode::I32Ne => self.run_ne::(context), - &Opcode::I32LtS => self.run_lt::(context), - &Opcode::I32LtU => self.run_lt::(context), - &Opcode::I32GtS => self.run_gt::(context), - &Opcode::I32GtU => self.run_gt::(context), - &Opcode::I32LeS => self.run_lte::(context), - &Opcode::I32LeU => self.run_lte::(context), - &Opcode::I32GeS => self.run_gte::(context), - &Opcode::I32GeU => self.run_gte::(context), + &isa::Instruction::I32Const(val) => self.run_const(val.into()), + &isa::Instruction::I64Const(val) => self.run_const(val.into()), + &isa::Instruction::F32Const(val) => self.run_const(RuntimeValue::decode_f32(val)), + &isa::Instruction::F64Const(val) => self.run_const(RuntimeValue::decode_f64(val)), - &Opcode::I64Eqz => self.run_eqz::(context), - &Opcode::I64Eq => self.run_eq::(context), - &Opcode::I64Ne => self.run_ne::(context), - &Opcode::I64LtS => self.run_lt::(context), - &Opcode::I64LtU => self.run_lt::(context), - &Opcode::I64GtS => self.run_gt::(context), - &Opcode::I64GtU => self.run_gt::(context), - &Opcode::I64LeS => self.run_lte::(context), - &Opcode::I64LeU => self.run_lte::(context), - &Opcode::I64GeS => self.run_gte::(context), - &Opcode::I64GeU => self.run_gte::(context), + &isa::Instruction::I32Eqz => self.run_eqz::(), + &isa::Instruction::I32Eq => self.run_eq::(), + &isa::Instruction::I32Ne => self.run_ne::(), + &isa::Instruction::I32LtS => self.run_lt::(), + &isa::Instruction::I32LtU => self.run_lt::(), + &isa::Instruction::I32GtS => self.run_gt::(), + &isa::Instruction::I32GtU => self.run_gt::(), + &isa::Instruction::I32LeS => self.run_lte::(), + &isa::Instruction::I32LeU => self.run_lte::(), + &isa::Instruction::I32GeS => self.run_gte::(), + &isa::Instruction::I32GeU => self.run_gte::(), - &Opcode::F32Eq => self.run_eq::(context), - &Opcode::F32Ne => self.run_ne::(context), - &Opcode::F32Lt => self.run_lt::(context), - &Opcode::F32Gt => 
self.run_gt::(context), - &Opcode::F32Le => self.run_lte::(context), - &Opcode::F32Ge => self.run_gte::(context), + &isa::Instruction::I64Eqz => self.run_eqz::(), + &isa::Instruction::I64Eq => self.run_eq::(), + &isa::Instruction::I64Ne => self.run_ne::(), + &isa::Instruction::I64LtS => self.run_lt::(), + &isa::Instruction::I64LtU => self.run_lt::(), + &isa::Instruction::I64GtS => self.run_gt::(), + &isa::Instruction::I64GtU => self.run_gt::(), + &isa::Instruction::I64LeS => self.run_lte::(), + &isa::Instruction::I64LeU => self.run_lte::(), + &isa::Instruction::I64GeS => self.run_gte::(), + &isa::Instruction::I64GeU => self.run_gte::(), - &Opcode::F64Eq => self.run_eq::(context), - &Opcode::F64Ne => self.run_ne::(context), - &Opcode::F64Lt => self.run_lt::(context), - &Opcode::F64Gt => self.run_gt::(context), - &Opcode::F64Le => self.run_lte::(context), - &Opcode::F64Ge => self.run_gte::(context), + &isa::Instruction::F32Eq => self.run_eq::(), + &isa::Instruction::F32Ne => self.run_ne::(), + &isa::Instruction::F32Lt => self.run_lt::(), + &isa::Instruction::F32Gt => self.run_gt::(), + &isa::Instruction::F32Le => self.run_lte::(), + &isa::Instruction::F32Ge => self.run_gte::(), - &Opcode::I32Clz => self.run_clz::(context), - &Opcode::I32Ctz => self.run_ctz::(context), - &Opcode::I32Popcnt => self.run_popcnt::(context), - &Opcode::I32Add => self.run_add::(context), - &Opcode::I32Sub => self.run_sub::(context), - &Opcode::I32Mul => self.run_mul::(context), - &Opcode::I32DivS => self.run_div::(context), - &Opcode::I32DivU => self.run_div::(context), - &Opcode::I32RemS => self.run_rem::(context), - &Opcode::I32RemU => self.run_rem::(context), - &Opcode::I32And => self.run_and::(context), - &Opcode::I32Or => self.run_or::(context), - &Opcode::I32Xor => self.run_xor::(context), - &Opcode::I32Shl => self.run_shl::(context, 0x1F), - &Opcode::I32ShrS => self.run_shr::(context, 0x1F), - &Opcode::I32ShrU => self.run_shr::(context, 0x1F), - &Opcode::I32Rotl => 
self.run_rotl::(context), - &Opcode::I32Rotr => self.run_rotr::(context), + &isa::Instruction::F64Eq => self.run_eq::(), + &isa::Instruction::F64Ne => self.run_ne::(), + &isa::Instruction::F64Lt => self.run_lt::(), + &isa::Instruction::F64Gt => self.run_gt::(), + &isa::Instruction::F64Le => self.run_lte::(), + &isa::Instruction::F64Ge => self.run_gte::(), - &Opcode::I64Clz => self.run_clz::(context), - &Opcode::I64Ctz => self.run_ctz::(context), - &Opcode::I64Popcnt => self.run_popcnt::(context), - &Opcode::I64Add => self.run_add::(context), - &Opcode::I64Sub => self.run_sub::(context), - &Opcode::I64Mul => self.run_mul::(context), - &Opcode::I64DivS => self.run_div::(context), - &Opcode::I64DivU => self.run_div::(context), - &Opcode::I64RemS => self.run_rem::(context), - &Opcode::I64RemU => self.run_rem::(context), - &Opcode::I64And => self.run_and::(context), - &Opcode::I64Or => self.run_or::(context), - &Opcode::I64Xor => self.run_xor::(context), - &Opcode::I64Shl => self.run_shl::(context, 0x3F), - &Opcode::I64ShrS => self.run_shr::(context, 0x3F), - &Opcode::I64ShrU => self.run_shr::(context, 0x3F), - &Opcode::I64Rotl => self.run_rotl::(context), - &Opcode::I64Rotr => self.run_rotr::(context), + &isa::Instruction::I32Clz => self.run_clz::(), + &isa::Instruction::I32Ctz => self.run_ctz::(), + &isa::Instruction::I32Popcnt => self.run_popcnt::(), + &isa::Instruction::I32Add => self.run_add::(), + &isa::Instruction::I32Sub => self.run_sub::(), + &isa::Instruction::I32Mul => self.run_mul::(), + &isa::Instruction::I32DivS => self.run_div::(), + &isa::Instruction::I32DivU => self.run_div::(), + &isa::Instruction::I32RemS => self.run_rem::(), + &isa::Instruction::I32RemU => self.run_rem::(), + &isa::Instruction::I32And => self.run_and::(), + &isa::Instruction::I32Or => self.run_or::(), + &isa::Instruction::I32Xor => self.run_xor::(), + &isa::Instruction::I32Shl => self.run_shl::(0x1F), + &isa::Instruction::I32ShrS => self.run_shr::(0x1F), + &isa::Instruction::I32ShrU 
=> self.run_shr::(0x1F), + &isa::Instruction::I32Rotl => self.run_rotl::(), + &isa::Instruction::I32Rotr => self.run_rotr::(), - &Opcode::F32Abs => self.run_abs::(context), - &Opcode::F32Neg => self.run_neg::(context), - &Opcode::F32Ceil => self.run_ceil::(context), - &Opcode::F32Floor => self.run_floor::(context), - &Opcode::F32Trunc => self.run_trunc::(context), - &Opcode::F32Nearest => self.run_nearest::(context), - &Opcode::F32Sqrt => self.run_sqrt::(context), - &Opcode::F32Add => self.run_add::(context), - &Opcode::F32Sub => self.run_sub::(context), - &Opcode::F32Mul => self.run_mul::(context), - &Opcode::F32Div => self.run_div::(context), - &Opcode::F32Min => self.run_min::(context), - &Opcode::F32Max => self.run_max::(context), - &Opcode::F32Copysign => self.run_copysign::(context), + &isa::Instruction::I64Clz => self.run_clz::(), + &isa::Instruction::I64Ctz => self.run_ctz::(), + &isa::Instruction::I64Popcnt => self.run_popcnt::(), + &isa::Instruction::I64Add => self.run_add::(), + &isa::Instruction::I64Sub => self.run_sub::(), + &isa::Instruction::I64Mul => self.run_mul::(), + &isa::Instruction::I64DivS => self.run_div::(), + &isa::Instruction::I64DivU => self.run_div::(), + &isa::Instruction::I64RemS => self.run_rem::(), + &isa::Instruction::I64RemU => self.run_rem::(), + &isa::Instruction::I64And => self.run_and::(), + &isa::Instruction::I64Or => self.run_or::(), + &isa::Instruction::I64Xor => self.run_xor::(), + &isa::Instruction::I64Shl => self.run_shl::(0x3F), + &isa::Instruction::I64ShrS => self.run_shr::(0x3F), + &isa::Instruction::I64ShrU => self.run_shr::(0x3F), + &isa::Instruction::I64Rotl => self.run_rotl::(), + &isa::Instruction::I64Rotr => self.run_rotr::(), - &Opcode::F64Abs => self.run_abs::(context), - &Opcode::F64Neg => self.run_neg::(context), - &Opcode::F64Ceil => self.run_ceil::(context), - &Opcode::F64Floor => self.run_floor::(context), - &Opcode::F64Trunc => self.run_trunc::(context), - &Opcode::F64Nearest => 
self.run_nearest::(context), - &Opcode::F64Sqrt => self.run_sqrt::(context), - &Opcode::F64Add => self.run_add::(context), - &Opcode::F64Sub => self.run_sub::(context), - &Opcode::F64Mul => self.run_mul::(context), - &Opcode::F64Div => self.run_div::(context), - &Opcode::F64Min => self.run_min::(context), - &Opcode::F64Max => self.run_max::(context), - &Opcode::F64Copysign => self.run_copysign::(context), + &isa::Instruction::F32Abs => self.run_abs::(), + &isa::Instruction::F32Neg => self.run_neg::(), + &isa::Instruction::F32Ceil => self.run_ceil::(), + &isa::Instruction::F32Floor => self.run_floor::(), + &isa::Instruction::F32Trunc => self.run_trunc::(), + &isa::Instruction::F32Nearest => self.run_nearest::(), + &isa::Instruction::F32Sqrt => self.run_sqrt::(), + &isa::Instruction::F32Add => self.run_add::(), + &isa::Instruction::F32Sub => self.run_sub::(), + &isa::Instruction::F32Mul => self.run_mul::(), + &isa::Instruction::F32Div => self.run_div::(), + &isa::Instruction::F32Min => self.run_min::(), + &isa::Instruction::F32Max => self.run_max::(), + &isa::Instruction::F32Copysign => self.run_copysign::(), - &Opcode::I32WrapI64 => self.run_wrap::(context), - &Opcode::I32TruncSF32 => self.run_trunc_to_int::(context), - &Opcode::I32TruncUF32 => self.run_trunc_to_int::(context), - &Opcode::I32TruncSF64 => self.run_trunc_to_int::(context), - &Opcode::I32TruncUF64 => self.run_trunc_to_int::(context), - &Opcode::I64ExtendSI32 => self.run_extend::(context), - &Opcode::I64ExtendUI32 => self.run_extend::(context), - &Opcode::I64TruncSF32 => self.run_trunc_to_int::(context), - &Opcode::I64TruncUF32 => self.run_trunc_to_int::(context), - &Opcode::I64TruncSF64 => self.run_trunc_to_int::(context), - &Opcode::I64TruncUF64 => self.run_trunc_to_int::(context), - &Opcode::F32ConvertSI32 => self.run_extend::(context), - &Opcode::F32ConvertUI32 => self.run_extend::(context), - &Opcode::F32ConvertSI64 => self.run_wrap::(context), - &Opcode::F32ConvertUI64 => self.run_wrap::(context), 
- &Opcode::F32DemoteF64 => self.run_wrap::(context), - &Opcode::F64ConvertSI32 => self.run_extend::(context), - &Opcode::F64ConvertUI32 => self.run_extend::(context), - &Opcode::F64ConvertSI64 => self.run_extend::(context), - &Opcode::F64ConvertUI64 => self.run_extend::(context), - &Opcode::F64PromoteF32 => self.run_extend::(context), + &isa::Instruction::F64Abs => self.run_abs::(), + &isa::Instruction::F64Neg => self.run_neg::(), + &isa::Instruction::F64Ceil => self.run_ceil::(), + &isa::Instruction::F64Floor => self.run_floor::(), + &isa::Instruction::F64Trunc => self.run_trunc::(), + &isa::Instruction::F64Nearest => self.run_nearest::(), + &isa::Instruction::F64Sqrt => self.run_sqrt::(), + &isa::Instruction::F64Add => self.run_add::(), + &isa::Instruction::F64Sub => self.run_sub::(), + &isa::Instruction::F64Mul => self.run_mul::(), + &isa::Instruction::F64Div => self.run_div::(), + &isa::Instruction::F64Min => self.run_min::(), + &isa::Instruction::F64Max => self.run_max::(), + &isa::Instruction::F64Copysign => self.run_copysign::(), - &Opcode::I32ReinterpretF32 => self.run_reinterpret::(context), - &Opcode::I64ReinterpretF64 => self.run_reinterpret::(context), - &Opcode::F32ReinterpretI32 => self.run_reinterpret::(context), - &Opcode::F64ReinterpretI64 => self.run_reinterpret::(context), + &isa::Instruction::I32WrapI64 => self.run_wrap::(), + &isa::Instruction::I32TruncSF32 => self.run_trunc_to_int::(), + &isa::Instruction::I32TruncUF32 => self.run_trunc_to_int::(), + &isa::Instruction::I32TruncSF64 => self.run_trunc_to_int::(), + &isa::Instruction::I32TruncUF64 => self.run_trunc_to_int::(), + &isa::Instruction::I64ExtendSI32 => self.run_extend::(), + &isa::Instruction::I64ExtendUI32 => self.run_extend::(), + &isa::Instruction::I64TruncSF32 => self.run_trunc_to_int::(), + &isa::Instruction::I64TruncUF32 => self.run_trunc_to_int::(), + &isa::Instruction::I64TruncSF64 => self.run_trunc_to_int::(), + &isa::Instruction::I64TruncUF64 => self.run_trunc_to_int::(), + 
&isa::Instruction::F32ConvertSI32 => self.run_extend::(), + &isa::Instruction::F32ConvertUI32 => self.run_extend::(), + &isa::Instruction::F32ConvertSI64 => self.run_wrap::(), + &isa::Instruction::F32ConvertUI64 => self.run_wrap::(), + &isa::Instruction::F32DemoteF64 => self.run_wrap::(), + &isa::Instruction::F64ConvertSI32 => self.run_extend::(), + &isa::Instruction::F64ConvertUI32 => self.run_extend::(), + &isa::Instruction::F64ConvertSI64 => self.run_extend::(), + &isa::Instruction::F64ConvertUI64 => self.run_extend::(), + &isa::Instruction::F64PromoteF32 => self.run_extend::(), + + &isa::Instruction::I32ReinterpretF32 => self.run_reinterpret::(), + &isa::Instruction::I64ReinterpretF64 => self.run_reinterpret::(), + &isa::Instruction::F32ReinterpretI32 => self.run_reinterpret::(), + &isa::Instruction::F64ReinterpretI64 => self.run_reinterpret::(), } } @@ -358,72 +368,45 @@ impl<'a, E: Externals> Interpreter<'a, E> { Err(TrapKind::Unreachable) } - fn run_nop(&mut self, _context: &mut FunctionContext) -> Result { - Ok(InstructionOutcome::RunNextInstruction) + fn run_br(&mut self, _context: &mut FunctionContext, target: isa::Target) -> Result { + Ok(InstructionOutcome::Branch(target)) } - fn run_block(&mut self, context: &mut FunctionContext, labels: &HashMap, block_type: BlockType) -> Result { - context.push_frame(labels, BlockFrameType::Block, block_type)?; - Ok(InstructionOutcome::RunNextInstruction) - } - - fn run_loop(&mut self, context: &mut FunctionContext, labels: &HashMap, block_type: BlockType) -> Result { - context.push_frame(labels, BlockFrameType::Loop, block_type)?; - Ok(InstructionOutcome::RunNextInstruction) - } - - fn run_if(&mut self, context: &mut FunctionContext, labels: &HashMap, block_type: BlockType) -> Result { - let condition: bool = context - .value_stack_mut() - .pop_as(); - let block_frame_type = if condition { BlockFrameType::IfTrue } else { - let else_pos = labels[&context.position]; - if !labels.contains_key(&else_pos) { - 
context.position = else_pos; - return Ok(InstructionOutcome::RunNextInstruction); - } - - context.position = else_pos; - BlockFrameType::IfFalse - }; - context.push_frame(labels, block_frame_type, block_type)?; - - Ok(InstructionOutcome::RunNextInstruction) - } - - fn run_else(&mut self, context: &mut FunctionContext, labels: &HashMap) -> Result { - let end_pos = labels[&context.position]; - context.pop_frame(false)?; - context.position = end_pos; - Ok(InstructionOutcome::RunNextInstruction) - } - - fn run_end(&mut self, context: &mut FunctionContext) -> Result { - context.pop_frame(false)?; - Ok(InstructionOutcome::End) - } - - fn run_br(&mut self, _context: &mut FunctionContext, label_idx: u32) -> Result { - Ok(InstructionOutcome::Branch(label_idx as usize)) - } - - fn run_br_if(&mut self, context: &mut FunctionContext, label_idx: u32) -> Result { - let condition = context.value_stack_mut().pop_as(); + fn run_br_nez(&mut self, target: isa::Target) -> Result { + let condition = self.value_stack.pop_as(); if condition { - Ok(InstructionOutcome::Branch(label_idx as usize)) + Ok(InstructionOutcome::Branch(target)) } else { Ok(InstructionOutcome::RunNextInstruction) } } - fn run_br_table(&mut self, context: &mut FunctionContext, table: &[u32], default: u32) -> Result { - let index: u32 = context.value_stack_mut() - .pop_as(); - Ok(InstructionOutcome::Branch(table.get(index as usize).cloned().unwrap_or(default) as usize)) + fn run_br_eqz(&mut self, target: isa::Target) -> Result { + let condition = self.value_stack.pop_as(); + if condition { + Ok(InstructionOutcome::RunNextInstruction) + } else { + + Ok(InstructionOutcome::Branch(target)) + } } - fn run_return(&mut self, _context: &mut FunctionContext) -> Result { - Ok(InstructionOutcome::Return) + fn run_br_table(&mut self, table: &[isa::Target]) -> Result { + let index: u32 = self.value_stack + .pop_as(); + + let dst = + if (index as usize) < table.len() - 1 { + table[index as usize].clone() + } else { + let len = 
table.len(); + table[len - 1].clone() + }; + Ok(InstructionOutcome::Branch(dst)) + } + + fn run_return(&mut self, drop: u32, keep: u8) -> Result { + Ok(InstructionOutcome::Return(drop, keep)) } fn run_call( @@ -443,8 +426,8 @@ impl<'a, E: Externals> Interpreter<'a, E> { context: &mut FunctionContext, signature_idx: u32, ) -> Result { - let table_func_idx: u32 = context - .value_stack_mut() + let table_func_idx: u32 = self + .value_stack .pop_as(); let table = context .module() @@ -469,46 +452,46 @@ impl<'a, E: Externals> Interpreter<'a, E> { Ok(InstructionOutcome::ExecuteCall(func_ref)) } - fn run_drop(&mut self, context: &mut FunctionContext) -> Result { - let _ = context - .value_stack_mut() + fn run_drop(&mut self) -> Result { + let _ = self + .value_stack .pop(); Ok(InstructionOutcome::RunNextInstruction) } - fn run_select(&mut self, context: &mut FunctionContext) -> Result { - let (left, mid, right) = context - .value_stack_mut() + fn run_select(&mut self) -> Result { + let (left, mid, right) = self + .value_stack .pop_triple(); let condition = right .try_into() .expect("Due to validation stack top should be I32"); let val = if condition { left } else { mid }; - context.value_stack_mut().push(val)?; + self.value_stack.push(val)?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_get_local(&mut self, context: &mut FunctionContext, index: u32) -> Result { - let val = context.get_local(index as usize); - context.value_stack_mut().push(val)?; + fn run_get_local(&mut self, index: u32) -> Result { + let val = *self.value_stack.pick_mut(index as usize); + self.value_stack.push(val)?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_set_local(&mut self, context: &mut FunctionContext, index: u32) -> Result { - let arg = context - .value_stack_mut() + fn run_set_local(&mut self, index: u32) -> Result { + let val = self + .value_stack .pop(); - context.set_local(index as usize, arg); + *self.value_stack.pick_mut(index as usize) = val; 
Ok(InstructionOutcome::RunNextInstruction) } - fn run_tee_local(&mut self, context: &mut FunctionContext, index: u32) -> Result { - let arg = context - .value_stack() + fn run_tee_local(&mut self, index: u32) -> Result { + let val = self + .value_stack .top() .clone(); - context.set_local(index as usize, arg); + *self.value_stack.pick_mut(index as usize) = val; Ok(InstructionOutcome::RunNextInstruction) } @@ -522,7 +505,7 @@ impl<'a, E: Externals> Interpreter<'a, E> { .global_by_index(index) .expect("Due to validation global should exists"); let val = global.get(); - context.value_stack_mut().push(val)?; + self.value_stack.push(val)?; Ok(InstructionOutcome::RunNextInstruction) } @@ -531,8 +514,8 @@ impl<'a, E: Externals> Interpreter<'a, E> { context: &mut FunctionContext, index: u32, ) -> Result { - let val = context - .value_stack_mut() + let val = self + .value_stack .pop(); let global = context .module() @@ -542,60 +525,60 @@ impl<'a, E: Externals> Interpreter<'a, E> { Ok(InstructionOutcome::RunNextInstruction) } - fn run_load(&mut self, context: &mut FunctionContext, _align: u32, offset: u32) -> Result + fn run_load(&mut self, context: &mut FunctionContext, offset: u32) -> Result where RuntimeValue: From, T: LittleEndianConvert { - let raw_address = context - .value_stack_mut() + let raw_address = self + .value_stack .pop_as(); let address = effective_address( offset, raw_address, )?; - let m = context.module() - .memory_by_index(DEFAULT_MEMORY_INDEX) + let m = context + .memory() .expect("Due to validation memory should exists"); let b = m.get(address, mem::size_of::()) .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?; let n = T::from_little_endian(&b) .expect("Can't fail since buffer length should be size_of::"); - context.value_stack_mut().push(n.into())?; + self.value_stack.push(n.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_load_extend(&mut self, context: &mut FunctionContext, _align: u32, offset: u32) -> Result + fn run_load_extend(&mut 
self, context: &mut FunctionContext, offset: u32) -> Result where T: ExtendInto, RuntimeValue: From, T: LittleEndianConvert { - let raw_address = context - .value_stack_mut() + let raw_address = self + .value_stack .pop_as(); let address = effective_address( offset, raw_address, )?; - let m = context.module() - .memory_by_index(DEFAULT_MEMORY_INDEX) + let m = context + .memory() .expect("Due to validation memory should exists"); let b = m.get(address, mem::size_of::()) .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?; let v = T::from_little_endian(&b) .expect("Can't fail since buffer length should be size_of::"); let stack_value: U = v.extend_into(); - context - .value_stack_mut() + self + .value_stack .push(stack_value.into()) .map_err(Into::into) .map(|_| InstructionOutcome::RunNextInstruction) } - fn run_store(&mut self, context: &mut FunctionContext, _align: u32, offset: u32) -> Result + fn run_store(&mut self, context: &mut FunctionContext, offset: u32) -> Result where T: FromRuntimeValue, T: LittleEndianConvert { - let stack_value = context - .value_stack_mut() + let stack_value = self + .value_stack .pop_as::() .into_little_endian(); - let raw_address = context - .value_stack_mut() + let raw_address = self + .value_stack .pop_as::(); let address = effective_address( @@ -603,8 +586,8 @@ impl<'a, E: Externals> Interpreter<'a, E> { raw_address, )?; - let m = context.module() - .memory_by_index(DEFAULT_MEMORY_INDEX) + let m = context + .memory() .expect("Due to validation memory should exists"); m.set(address, &stack_value) .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?; @@ -614,7 +597,6 @@ impl<'a, E: Externals> Interpreter<'a, E> { fn run_store_wrap( &mut self, context: &mut FunctionContext, - _align: u32, offset: u32, ) -> Result where @@ -622,22 +604,22 @@ impl<'a, E: Externals> Interpreter<'a, E> { T: WrapInto, U: LittleEndianConvert, { - let stack_value: T = context - .value_stack_mut() + let stack_value: T = self + .value_stack .pop() .try_into() 
.expect("Due to validation value should be of proper type"); let stack_value = stack_value.wrap_into().into_little_endian(); - let raw_address = context - .value_stack_mut() + let raw_address = self + .value_stack .pop_as::(); let address = effective_address( offset, raw_address, )?; - let m = context.module() - .memory_by_index(DEFAULT_MEMORY_INDEX) + let m = context + .memory() .expect("Due to validation memory should exists"); m.set(address, &stack_value) .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?; @@ -645,387 +627,371 @@ impl<'a, E: Externals> Interpreter<'a, E> { } fn run_current_memory(&mut self, context: &mut FunctionContext) -> Result { - let m = context.module() - .memory_by_index(DEFAULT_MEMORY_INDEX) + let m = context + .memory() .expect("Due to validation memory should exists"); let s = m.current_size().0; - context - .value_stack_mut() + self + .value_stack .push(RuntimeValue::I32(s as i32))?; Ok(InstructionOutcome::RunNextInstruction) } fn run_grow_memory(&mut self, context: &mut FunctionContext) -> Result { - let pages: u32 = context - .value_stack_mut() + let pages: u32 = self + .value_stack .pop_as(); - let m = context.module() - .memory_by_index(DEFAULT_MEMORY_INDEX) + let m = context + .memory() .expect("Due to validation memory should exists"); let m = match m.grow(Pages(pages as usize)) { Ok(Pages(new_size)) => new_size as u32, Err(_) => u32::MAX, // Returns -1 (or 0xFFFFFFFF) in case of error. 
}; - context - .value_stack_mut() + self + .value_stack .push(RuntimeValue::I32(m as i32))?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_const(&mut self, context: &mut FunctionContext, val: RuntimeValue) -> Result { - context - .value_stack_mut() + fn run_const(&mut self, val: RuntimeValue) -> Result { + self + .value_stack .push(val) .map_err(Into::into) .map(|_| InstructionOutcome::RunNextInstruction) } - fn run_relop(&mut self, context: &mut FunctionContext, f: F) -> Result + fn run_relop(&mut self, f: F) -> Result where T: FromRuntimeValue, F: FnOnce(T, T) -> bool, { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = if f(left, right) { RuntimeValue::I32(1) } else { RuntimeValue::I32(0) }; - context.value_stack_mut().push(v)?; + self.value_stack.push(v)?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_eqz(&mut self, context: &mut FunctionContext) -> Result + fn run_eqz(&mut self) -> Result where T: FromRuntimeValue, T: PartialEq + Default { - let v = context - .value_stack_mut() + let v = self + .value_stack .pop_as::(); let v = RuntimeValue::I32(if v == Default::default() { 1 } else { 0 }); - context.value_stack_mut().push(v)?; + self.value_stack.push(v)?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_eq(&mut self, context: &mut FunctionContext) -> Result + fn run_eq(&mut self) -> Result where T: FromRuntimeValue + PartialEq { - self.run_relop(context, |left: T, right: T| left == right) + self.run_relop(|left: T, right: T| left == right) } - fn run_ne(&mut self, context: &mut FunctionContext) -> Result + fn run_ne(&mut self) -> Result where T: FromRuntimeValue + PartialEq { - self.run_relop(context, |left: T, right: T| left != right) + self.run_relop(|left: T, right: T| left != right) } - fn run_lt(&mut self, context: &mut FunctionContext) -> Result + fn run_lt(&mut self) -> Result 
where T: FromRuntimeValue + PartialOrd { - self.run_relop(context, |left: T, right: T| left < right) + self.run_relop(|left: T, right: T| left < right) } - fn run_gt(&mut self, context: &mut FunctionContext) -> Result + fn run_gt(&mut self) -> Result where T: FromRuntimeValue + PartialOrd { - self.run_relop(context, |left: T, right: T| left > right) + self.run_relop(|left: T, right: T| left > right) } - fn run_lte(&mut self, context: &mut FunctionContext) -> Result + fn run_lte(&mut self) -> Result where T: FromRuntimeValue + PartialOrd { - self.run_relop(context, |left: T, right: T| left <= right) + self.run_relop(|left: T, right: T| left <= right) } - fn run_gte(&mut self, context: &mut FunctionContext) -> Result + fn run_gte(&mut self) -> Result where T: FromRuntimeValue + PartialOrd { - self.run_relop(context, |left: T, right: T| left >= right) + self.run_relop(|left: T, right: T| left >= right) } - fn run_unop(&mut self, context: &mut FunctionContext, f: F) -> Result + fn run_unop(&mut self, f: F) -> Result where F: FnOnce(T) -> U, T: FromRuntimeValue, RuntimeValue: From { - let v = context - .value_stack_mut() + let v = self + .value_stack .pop_as::(); let v = f(v); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_clz(&mut self, context: &mut FunctionContext) -> Result + fn run_clz(&mut self) -> Result where RuntimeValue: From, T: Integer + FromRuntimeValue { - self.run_unop(context, |v: T| v.leading_zeros()) + self.run_unop(|v: T| v.leading_zeros()) } - fn run_ctz(&mut self, context: &mut FunctionContext) -> Result + fn run_ctz(&mut self) -> Result where RuntimeValue: From, T: Integer + FromRuntimeValue { - self.run_unop(context, |v: T| v.trailing_zeros()) + self.run_unop(|v: T| v.trailing_zeros()) } - fn run_popcnt(&mut self, context: &mut FunctionContext) -> Result + fn run_popcnt(&mut self) -> Result where RuntimeValue: From, T: Integer + FromRuntimeValue { - 
self.run_unop(context, |v: T| v.count_ones()) + self.run_unop(|v: T| v.count_ones()) } - fn run_add(&mut self, context: &mut FunctionContext) -> Result + fn run_add(&mut self) -> Result where RuntimeValue: From, T: ArithmeticOps + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.add(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_sub(&mut self, context: &mut FunctionContext) -> Result + fn run_sub(&mut self) -> Result where RuntimeValue: From, T: ArithmeticOps + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.sub(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_mul(&mut self, context: &mut FunctionContext) -> Result + fn run_mul(&mut self) -> Result where RuntimeValue: From, T: ArithmeticOps + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.mul(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_div(&mut self, context: &mut FunctionContext) -> Result + fn run_div(&mut self) -> Result where RuntimeValue: From, T: TransmuteInto + FromRuntimeValue, U: ArithmeticOps + TransmuteInto { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + 
.value_stack + .pop_pair_as::(); let (left, right) = (left.transmute_into(), right.transmute_into()); let v = left.div(right)?; let v = v.transmute_into(); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_rem(&mut self, context: &mut FunctionContext) -> Result + fn run_rem(&mut self) -> Result where RuntimeValue: From, T: TransmuteInto + FromRuntimeValue, U: Integer + TransmuteInto { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let (left, right) = (left.transmute_into(), right.transmute_into()); let v = left.rem(right)?; let v = v.transmute_into(); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_and(&mut self, context: &mut FunctionContext) -> Result + fn run_and(&mut self) -> Result where RuntimeValue: From<::Output>, T: ops::BitAnd + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.bitand(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_or(&mut self, context: &mut FunctionContext) -> Result + fn run_or(&mut self) -> Result where RuntimeValue: From<::Output>, T: ops::BitOr + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.bitor(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_xor(&mut self, 
context: &mut FunctionContext) -> Result + fn run_xor(&mut self) -> Result where RuntimeValue: From<::Output>, T: ops::BitXor + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.bitxor(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_shl(&mut self, context: &mut FunctionContext, mask: T) -> Result + fn run_shl(&mut self, mask: T) -> Result where RuntimeValue: From<>::Output>, T: ops::Shl + ops::BitAnd + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.shl(right & mask); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_shr(&mut self, context: &mut FunctionContext, mask: U) -> Result + fn run_shr(&mut self, mask: U) -> Result where RuntimeValue: From, T: TransmuteInto + FromRuntimeValue, U: ops::Shr + ops::BitAnd, >::Output: TransmuteInto { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let (left, right) = (left.transmute_into(), right.transmute_into()); let v = left.shr(right & mask); let v = v.transmute_into(); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_rotl(&mut self, context: &mut FunctionContext) -> Result + fn run_rotl(&mut self) -> Result where RuntimeValue: From, T: Integer + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to 
validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.rotl(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_rotr(&mut self, context: &mut FunctionContext) -> Result + fn run_rotr(&mut self) -> Result where RuntimeValue: From, T: Integer + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.rotr(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_abs(&mut self, context: &mut FunctionContext) -> Result + fn run_abs(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - self.run_unop(context, |v: T| v.abs()) + self.run_unop(|v: T| v.abs()) } - fn run_neg(&mut self, context: &mut FunctionContext) -> Result + fn run_neg(&mut self) -> Result where RuntimeValue: From<::Output>, T: ops::Neg + FromRuntimeValue { - self.run_unop(context, |v: T| v.neg()) + self.run_unop(|v: T| v.neg()) } - fn run_ceil(&mut self, context: &mut FunctionContext) -> Result + fn run_ceil(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - self.run_unop(context, |v: T| v.ceil()) + self.run_unop(|v: T| v.ceil()) } - fn run_floor(&mut self, context: &mut FunctionContext) -> Result + fn run_floor(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - self.run_unop(context, |v: T| v.floor()) + self.run_unop(|v: T| v.floor()) } - fn run_trunc(&mut self, context: &mut FunctionContext) -> Result + fn run_trunc(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - self.run_unop(context, |v: T| v.trunc()) + self.run_unop(|v: T| v.trunc()) } - fn run_nearest(&mut self, context: &mut 
FunctionContext) -> Result + fn run_nearest(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - self.run_unop(context, |v: T| v.nearest()) + self.run_unop(|v: T| v.nearest()) } - fn run_sqrt(&mut self, context: &mut FunctionContext) -> Result + fn run_sqrt(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - self.run_unop(context, |v: T| v.sqrt()) + self.run_unop(|v: T| v.sqrt()) } - fn run_min(&mut self, context: &mut FunctionContext) -> Result + fn run_min(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.min(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_max(&mut self, context: &mut FunctionContext) -> Result + fn run_max(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.max(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_copysign(&mut self, context: &mut FunctionContext) -> Result + fn run_copysign(&mut self) -> Result where RuntimeValue: From, T: Float + FromRuntimeValue { - let (left, right) = context - .value_stack_mut() - .pop_pair_as::() - .expect("Due to validation stack should contain pair of values"); + let (left, right) = self + .value_stack + .pop_pair_as::(); let v = left.copysign(right); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_wrap(&mut self, 
context: &mut FunctionContext) -> Result + fn run_wrap(&mut self) -> Result where RuntimeValue: From, T: WrapInto + FromRuntimeValue { - self.run_unop(context, |v: T| v.wrap_into()) + self.run_unop(|v: T| v.wrap_into()) } - fn run_trunc_to_int(&mut self, context: &mut FunctionContext) -> Result + fn run_trunc_to_int(&mut self) -> Result where RuntimeValue: From, T: TryTruncateInto + FromRuntimeValue, U: TransmuteInto, { - let v = context - .value_stack_mut() + let v = self + .value_stack .pop_as::(); v.try_truncate_into() .map(|v| v.transmute_into()) - .map(|v| context.value_stack_mut().push(v.into())) + .map(|v| self.value_stack.push(v.into())) .map(|_| InstructionOutcome::RunNextInstruction) } - fn run_extend(&mut self, context: &mut FunctionContext) -> Result + fn run_extend(&mut self) -> Result where RuntimeValue: From, T: ExtendInto + FromRuntimeValue, U: TransmuteInto { - let v = context - .value_stack_mut() + let v = self + .value_stack .pop_as::(); let v = v.extend_into().transmute_into(); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } - fn run_reinterpret(&mut self, context: &mut FunctionContext) -> Result + fn run_reinterpret(&mut self) -> Result where RuntimeValue: From, T: FromRuntimeValue, T: TransmuteInto { - let v = context - .value_stack_mut() + let v = self + .value_stack .pop_as::(); let v = v.transmute_into(); - context.value_stack_mut().push(v.into())?; + self.value_stack.push(v.into())?; Ok(InstructionOutcome::RunNextInstruction) } @@ -1038,157 +1004,56 @@ struct FunctionContext { /// Internal function reference. pub function: FuncRef, pub module: ModuleRef, - /// Function return type. - pub return_type: BlockType, - /// Local variables. - pub locals: Vec, - /// Values stack. - pub value_stack: ValueStack, - /// Blocks frames stack. - pub frame_stack: StackWithLimit, + pub memory: Option, /// Current instruction position. 
pub position: usize, } impl FunctionContext { - pub fn new(function: FuncRef, value_stack_limit: usize, frame_stack_limit: usize, signature: &Signature, args: Vec) -> Self { + pub fn new(function: FuncRef) -> Self { let module = match *function.as_internal() { FuncInstanceInternal::Internal { ref module, .. } => module.upgrade().expect("module deallocated"), FuncInstanceInternal::Host { .. } => panic!("Host functions can't be called as internally defined functions; Thus FunctionContext can be created only with internally defined functions; qed"), }; + let memory = module.memory_by_index(DEFAULT_MEMORY_INDEX); FunctionContext { is_initialized: false, function: function, module: ModuleRef(module), - return_type: signature.return_type().map(|vt| BlockType::Value(vt.into_elements())).unwrap_or(BlockType::NoResult), - value_stack: ValueStack::with_limit(value_stack_limit), - frame_stack: StackWithLimit::with_limit(frame_stack_limit), - locals: args, + memory: memory, position: 0, } } - pub fn nested(&mut self, function: FuncRef) -> Result { - let (function_locals, module, function_return_type) = { - let module = match *function.as_internal() { - FuncInstanceInternal::Internal { ref module, .. } => module.upgrade().expect("module deallocated"), - FuncInstanceInternal::Host { .. 
} => panic!("Host functions can't be called as internally defined functions; Thus FunctionContext can be created only with internally defined functions; qed"), - }; - let function_type = function.signature(); - let function_return_type = function_type.return_type().map(|vt| BlockType::Value(vt.into_elements())).unwrap_or(BlockType::NoResult); - let function_locals = prepare_function_args(function_type, &mut self.value_stack); - (function_locals, module, function_return_type) - }; - - Ok(FunctionContext { - is_initialized: false, - function: function, - module: ModuleRef(module), - return_type: function_return_type, - value_stack: ValueStack::with_limit(self.value_stack.limit() - self.value_stack.len()), - frame_stack: StackWithLimit::with_limit(self.frame_stack.limit() - self.frame_stack.len()), - locals: function_locals, - position: 0, - }) - } - pub fn is_initialized(&self) -> bool { self.is_initialized } - pub fn initialize(&mut self, locals: &[Local]) { + pub fn initialize(&mut self, locals: &[Local], value_stack: &mut ValueStack) -> Result<(), TrapKind> { debug_assert!(!self.is_initialized); - self.is_initialized = true; let locals = locals.iter() .flat_map(|l| repeat(l.value_type()).take(l.count() as usize)) .map(::types::ValueType::from_elements) .map(RuntimeValue::default) .collect::>(); - self.locals.extend(locals); + + // TODO: Replace with extend. 
+ for local in locals { + value_stack.push(local) + .map_err(|_| TrapKind::StackOverflow)?; + } + + self.is_initialized = true; + Ok(()) } pub fn module(&self) -> ModuleRef { self.module.clone() } - pub fn set_local(&mut self, index: usize, value: RuntimeValue) { - let l = self.locals.get_mut(index).expect("Due to validation local should exists"); - *l = value; - } - - pub fn get_local(&mut self, index: usize) -> RuntimeValue { - self.locals.get(index) - .cloned() - .expect("Due to validation local should exists") - } - - pub fn value_stack(&self) -> &ValueStack { - &self.value_stack - } - - pub fn value_stack_mut(&mut self) -> &mut ValueStack { - &mut self.value_stack - } - - pub fn frame_stack(&self) -> &StackWithLimit { - &self.frame_stack - } - - pub fn push_frame(&mut self, labels: &HashMap, frame_type: BlockFrameType, block_type: BlockType) -> Result<(), TrapKind> { - let begin_position = self.position; - let branch_position = match frame_type { - BlockFrameType::Function => usize::MAX, - BlockFrameType::Loop => begin_position, - BlockFrameType::IfTrue => { - let else_pos = labels[&begin_position]; - 1usize + match labels.get(&else_pos) { - Some(end_pos) => *end_pos, - None => else_pos, - } - }, - _ => labels[&begin_position] + 1, - }; - let end_position = match frame_type { - BlockFrameType::Function => usize::MAX, - _ => labels[&begin_position] + 1, - }; - - self.frame_stack.push(BlockFrame { - frame_type: frame_type, - block_type: block_type, - begin_position: begin_position, - branch_position: branch_position, - end_position: end_position, - value_stack_len: self.value_stack.len(), - polymorphic_stack: false, - }).map_err(|_| TrapKind::StackOverflow)?; - - Ok(()) - } - - pub fn discard_frame(&mut self) { - let _ = self.frame_stack.pop().expect("Due to validation frame stack shouldn't be empty"); - } - - pub fn pop_frame(&mut self, is_branch: bool) -> Result<(), TrapKind> { - let frame = self.frame_stack - .pop() - .expect("Due to validation frame stack 
shouldn't be empty"); - assert!(frame.value_stack_len <= self.value_stack.len(), "invalid stack len"); - - let frame_value = match frame.block_type { - BlockType::Value(_) if frame.frame_type != BlockFrameType::Loop || !is_branch => - Some(self.value_stack.pop()), - _ => None, - }; - self.value_stack.resize(frame.value_stack_len); - self.position = if is_branch { frame.branch_position } else { frame.end_position }; - if let Some(frame_value) = frame_value { - self.value_stack.push(frame_value)?; - } - - Ok(()) + pub fn memory(&self) -> Option<&MemoryRef> { + self.memory.as_ref() } } @@ -1243,65 +1108,94 @@ pub fn check_function_args(signature: &Signature, args: &[RuntimeValue]) -> Resu Ok(()) } +#[derive(Debug)] struct ValueStack { - stack_with_limit: StackWithLimit, + buf: Box<[RuntimeValue]>, + /// Index of the first free place in the stack. + sp: usize, } impl ValueStack { fn with_limit(limit: usize) -> ValueStack { + let mut buf = Vec::new(); + buf.resize(limit, RuntimeValue::I32(0)); + ValueStack { - stack_with_limit: StackWithLimit::with_limit(limit), + buf: buf.into_boxed_slice(), + sp: 0, } } + #[inline] + fn drop_keep(&mut self, drop: u32, keep: u8) { + assert!(keep <= 1); + + if keep == 1 { + let top = *self.top(); + *self.pick_mut(drop as usize + 1) = top; + } + + let cur_stack_len = self.len(); + self.sp = cur_stack_len - drop as usize; + } + + #[inline] fn pop_as(&mut self) -> T where T: FromRuntimeValue, { - let value = self.stack_with_limit - .pop() - .expect("Due to validation stack shouldn't be empty"); + let value = self.pop(); value.try_into().expect("Due to validation stack top's type should match") } - fn pop_pair_as(&mut self) -> Result<(T, T), Error> + #[inline] + fn pop_pair_as(&mut self) -> (T, T) where T: FromRuntimeValue, { let right = self.pop_as(); let left = self.pop_as(); - Ok((left, right)) + (left, right) } + #[inline] fn pop_triple(&mut self) -> (RuntimeValue, RuntimeValue, RuntimeValue) { - let right = 
self.stack_with_limit.pop().expect("Due to validation stack shouldn't be empty"); - let mid = self.stack_with_limit.pop().expect("Due to validation stack shouldn't be empty"); - let left = self.stack_with_limit.pop().expect("Due to validation stack shouldn't be empty"); + let right = self.pop(); + let mid = self.pop(); + let left = self.pop(); (left, mid, right) } - fn pop(&mut self) -> RuntimeValue { - self.stack_with_limit.pop().expect("Due to validation stack shouldn't be empty") - } - - fn push(&mut self, value: RuntimeValue) -> Result<(), TrapKind> { - self.stack_with_limit.push(value) - .map_err(|_| TrapKind::StackOverflow) - } - - fn resize(&mut self, new_len: usize) { - self.stack_with_limit.resize(new_len, RuntimeValue::I32(0)); - } - - fn len(&self) -> usize { - self.stack_with_limit.len() - } - - fn limit(&self) -> usize { - self.stack_with_limit.limit() - } - + #[inline] fn top(&self) -> &RuntimeValue { - self.stack_with_limit.top().expect("Due to validation stack shouldn't be empty") + self.pick(1) + } + + fn pick(&self, depth: usize) -> &RuntimeValue { + &self.buf[self.sp - depth] + } + + #[inline] + fn pick_mut(&mut self, depth: usize) -> &mut RuntimeValue { + &mut self.buf[self.sp - depth] + } + + #[inline] + fn pop(&mut self) -> RuntimeValue { + self.sp -= 1; + self.buf[self.sp] + } + + #[inline] + fn push(&mut self, value: RuntimeValue) -> Result<(), TrapKind> { + let cell = self.buf.get_mut(self.sp).ok_or_else(|| TrapKind::StackOverflow)?; + *cell = value; + self.sp += 1; + Ok(()) + } + + #[inline] + fn len(&self) -> usize { + self.sp } } diff --git a/src/validation/func.rs b/src/validation/func.rs index 1145644..cbad520 100644 --- a/src/validation/func.rs +++ b/src/validation/func.rs @@ -1,36 +1,97 @@ use std::u32; -use std::iter::repeat; use std::collections::HashMap; use parity_wasm::elements::{Opcode, BlockType, ValueType, TableElementType, Func, FuncBody}; use common::{DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX}; use 
validation::context::ModuleContext; use validation::Error; +use validation::util::Locals; use common::stack::StackWithLimit; -use common::{BlockFrame, BlockFrameType}; +use isa; /// Maximum number of entries in value stack per function. const DEFAULT_VALUE_STACK_LIMIT: usize = 16384; /// Maximum number of entries in frame stack per function. const DEFAULT_FRAME_STACK_LIMIT: usize = 16384; -/// Function validation context. -struct FunctionValidationContext<'a> { - /// Wasm module - module: &'a ModuleContext, - /// Current instruction position. - position: usize, - /// Local variables. - locals: &'a [ValueType], - /// Value stack. - value_stack: StackWithLimit, - /// Frame stack. - frame_stack: StackWithLimit, - /// Function return type. None if validating expression. - return_type: Option, - /// Labels positions. - labels: HashMap, +/// Control stack frame. +#[derive(Debug, Clone)] +struct BlockFrame { + /// Frame type. + frame_type: BlockFrameType, + /// A signature, which is a block signature type indicating the number and types of result values of the region. + block_type: BlockType, + /// A label for reference to block instruction. + begin_position: usize, + /// A limit integer value, which is an index into the value stack indicating where to reset it to on a branch to that label. + value_stack_len: usize, + /// Boolean which signals whether value stack became polymorphic. Value stack starts in non-polymorphic state and + /// becomes polymorphic only after an instruction that never passes control further is executed, + /// i.e. `unreachable`, `br` (but not `br_if`!), etc. + polymorphic_stack: bool, +} + +/// Type of block frame. +#[derive(Debug, Clone, Copy, PartialEq)] +enum BlockFrameType { + /// Usual block frame. + /// + /// Can be used for an implicit function block. + Block { + end_label: LabelId, + }, + /// Loop frame (branching to the beginning of block). + Loop { + header: LabelId, + }, + /// True-subblock of if expression. 
+ IfTrue { + /// If jump happens inside the if-true block then control will + /// land on this label. + end_label: LabelId, + + /// If the condition of the `if` statement is unsatisfied, control + /// will land on this label. This label might point to `else` block if it + /// exists. Otherwise it equal to `end_label`. + if_not: LabelId, + }, + /// False-subblock of if expression. + IfFalse { + end_label: LabelId, + } +} + +impl BlockFrameType { + /// Returns a label which should be used as a branch destination. + fn br_destination(&self) -> LabelId { + match *self { + BlockFrameType::Block { end_label } => end_label, + BlockFrameType::Loop { header } => header, + BlockFrameType::IfTrue { end_label, .. } => end_label, + BlockFrameType::IfFalse { end_label } => end_label, + } + } + + /// Returns a label which should be resolved at the `End` opcode. + /// + /// All block types have it except loops. Loops doesn't use end as a branch + /// destination. + fn end_label(&self) -> LabelId { + match *self { + BlockFrameType::Block { end_label } => end_label, + BlockFrameType::IfTrue { end_label, .. } => end_label, + BlockFrameType::IfFalse { end_label } => end_label, + BlockFrameType::Loop { .. } => panic!("loop doesn't use end label"), + } + } + + fn is_loop(&self) -> bool { + match *self { + BlockFrameType::Loop { .. } => true, + _ => false, + } + } } /// Value type on the stack. 
@@ -59,34 +120,31 @@ impl Validator { module: &ModuleContext, func: &Func, body: &FuncBody, - ) -> Result, Error> { + ) -> Result { let (params, result_ty) = module.require_function_type(func.type_ref())?; - // locals = (params + vars) - let mut locals = params.to_vec(); - locals.extend( - body.locals() - .iter() - .flat_map(|l| repeat(l.value_type()) - .take(l.count() as usize) - ), - ); - let mut context = FunctionValidationContext::new( &module, - &locals, + Locals::new(params, body.locals()), DEFAULT_VALUE_STACK_LIMIT, DEFAULT_FRAME_STACK_LIMIT, result_ty, ); - context.push_label(BlockFrameType::Function, result_ty)?; + let end_label = context.sink.new_label(); + context.push_label( + BlockFrameType::Block { + end_label, + }, + result_ty + )?; Validator::validate_function_block(&mut context, body.code().elements())?; + while !context.frame_stack.is_empty() { context.pop_label()?; } - Ok(context.into_labels()) + Ok(context.into_code()) } fn validate_function_block(context: &mut FunctionValidationContext, body: &[Opcode]) -> Result<(), Error> { @@ -97,7 +155,12 @@ impl Validator { loop { let opcode = &body[context.position]; - match Validator::validate_instruction(context, opcode)? 
{ + + let outcome = Validator::validate_instruction(context, opcode) + .map_err(|err| Error(format!("At instruction {:?}(@{}): {}", opcode, context.position, err)))?; + + println!("opcode: {:?}, outcome={:?}", opcode, outcome); + match outcome { InstructionOutcome::ValidateNextInstruction => (), InstructionOutcome::Unreachable => context.unreachable()?, } @@ -112,196 +175,1011 @@ impl Validator { fn validate_instruction(context: &mut FunctionValidationContext, opcode: &Opcode) -> Result { use self::Opcode::*; match *opcode { - Unreachable => Ok(InstructionOutcome::Unreachable), - Nop => Ok(InstructionOutcome::ValidateNextInstruction), - Block(block_type) => Validator::validate_block(context, block_type), - Loop(block_type) => Validator::validate_loop(context, block_type), - If(block_type) => Validator::validate_if(context, block_type), - Else => Validator::validate_else(context), - End => Validator::validate_end(context), - Br(idx) => Validator::validate_br(context, idx), - BrIf(idx) => Validator::validate_br_if(context, idx), - BrTable(ref table, default) => Validator::validate_br_table(context, table, default), - Return => Validator::validate_return(context), + // Nop instruction doesn't do anything. It is safe to just skip it. + Nop => {}, - Call(index) => Validator::validate_call(context, index), - CallIndirect(index, _reserved) => Validator::validate_call_indirect(context, index), + Unreachable => { + context.sink.emit(isa::Instruction::Unreachable); + return Ok(InstructionOutcome::Unreachable); + }, - Drop => Validator::validate_drop(context), - Select => Validator::validate_select(context), + Block(block_type) => { + let end_label = context.sink.new_label(); + context.push_label( + BlockFrameType::Block { + end_label + }, + block_type + )?; + }, + Loop(block_type) => { + // Resolve loop header right away. 
+ let header = context.sink.new_label(); + context.sink.resolve_label(header); - GetLocal(index) => Validator::validate_get_local(context, index), - SetLocal(index) => Validator::validate_set_local(context, index), - TeeLocal(index) => Validator::validate_tee_local(context, index), - GetGlobal(index) => Validator::validate_get_global(context, index), - SetGlobal(index) => Validator::validate_set_global(context, index), + context.push_label( + BlockFrameType::Loop { + header, + }, + block_type + )?; + }, + If(block_type) => { + // if + // .. + // end + // + // translates to -> + // + // br_if_not $if_not + // .. + // $if_not: - I32Load(align, _) => Validator::validate_load(context, align, 4, ValueType::I32), - I64Load(align, _) => Validator::validate_load(context, align, 8, ValueType::I64), - F32Load(align, _) => Validator::validate_load(context, align, 4, ValueType::F32), - F64Load(align, _) => Validator::validate_load(context, align, 8, ValueType::F64), - I32Load8S(align, _) => Validator::validate_load(context, align, 1, ValueType::I32), - I32Load8U(align, _) => Validator::validate_load(context, align, 1, ValueType::I32), - I32Load16S(align, _) => Validator::validate_load(context, align, 2, ValueType::I32), - I32Load16U(align, _) => Validator::validate_load(context, align, 2, ValueType::I32), - I64Load8S(align, _) => Validator::validate_load(context, align, 1, ValueType::I64), - I64Load8U(align, _) => Validator::validate_load(context, align, 1, ValueType::I64), - I64Load16S(align, _) => Validator::validate_load(context, align, 2, ValueType::I64), - I64Load16U(align, _) => Validator::validate_load(context, align, 2, ValueType::I64), - I64Load32S(align, _) => Validator::validate_load(context, align, 4, ValueType::I64), - I64Load32U(align, _) => Validator::validate_load(context, align, 4, ValueType::I64), + // if_not will be resolved whenever `end` or `else` operator will be met. 
+ let if_not = context.sink.new_label(); + let end_label = context.sink.new_label(); - I32Store(align, _) => Validator::validate_store(context, align, 4, ValueType::I32), - I64Store(align, _) => Validator::validate_store(context, align, 8, ValueType::I64), - F32Store(align, _) => Validator::validate_store(context, align, 4, ValueType::F32), - F64Store(align, _) => Validator::validate_store(context, align, 8, ValueType::F64), - I32Store8(align, _) => Validator::validate_store(context, align, 1, ValueType::I32), - I32Store16(align, _) => Validator::validate_store(context, align, 2, ValueType::I32), - I64Store8(align, _) => Validator::validate_store(context, align, 1, ValueType::I64), - I64Store16(align, _) => Validator::validate_store(context, align, 2, ValueType::I64), - I64Store32(align, _) => Validator::validate_store(context, align, 4, ValueType::I64), + context.pop_value(ValueType::I32.into())?; + context.push_label( + BlockFrameType::IfTrue { + if_not, + end_label, + }, + block_type + )?; - CurrentMemory(_) => Validator::validate_current_memory(context), - GrowMemory(_) => Validator::validate_grow_memory(context), + context.sink.emit_br_eqz(Target { + label: if_not, + drop_keep: DropKeep { drop: 0, keep: 0 }, + }); + }, + Else => { + let (block_type, if_not, end_label) = { + let top_frame = context.top_label()?; - I32Const(_) => Validator::validate_const(context, ValueType::I32), - I64Const(_) => Validator::validate_const(context, ValueType::I64), - F32Const(_) => Validator::validate_const(context, ValueType::F32), - F64Const(_) => Validator::validate_const(context, ValueType::F64), + let (if_not, end_label) = match top_frame.frame_type { + BlockFrameType::IfTrue { if_not, end_label } => (if_not, end_label), + _ => return Err(Error("Misplaced else instruction".into())), + }; + (top_frame.block_type, if_not, end_label) + }; - I32Eqz => Validator::validate_testop(context, ValueType::I32), - I32Eq => Validator::validate_relop(context, ValueType::I32), - I32Ne => 
Validator::validate_relop(context, ValueType::I32), - I32LtS => Validator::validate_relop(context, ValueType::I32), - I32LtU => Validator::validate_relop(context, ValueType::I32), - I32GtS => Validator::validate_relop(context, ValueType::I32), - I32GtU => Validator::validate_relop(context, ValueType::I32), - I32LeS => Validator::validate_relop(context, ValueType::I32), - I32LeU => Validator::validate_relop(context, ValueType::I32), - I32GeS => Validator::validate_relop(context, ValueType::I32), - I32GeU => Validator::validate_relop(context, ValueType::I32), + // First, we need to finish if-true block: add a jump from the end of the if-true block + // to the "end_label" (it will be resolved at End). + context.sink.emit_br(Target { + label: end_label, + drop_keep: DropKeep { drop: 0, keep: 0 }, + }); - I64Eqz => Validator::validate_testop(context, ValueType::I64), - I64Eq => Validator::validate_relop(context, ValueType::I64), - I64Ne => Validator::validate_relop(context, ValueType::I64), - I64LtS => Validator::validate_relop(context, ValueType::I64), - I64LtU => Validator::validate_relop(context, ValueType::I64), - I64GtS => Validator::validate_relop(context, ValueType::I64), - I64GtU => Validator::validate_relop(context, ValueType::I64), - I64LeS => Validator::validate_relop(context, ValueType::I64), - I64LeU => Validator::validate_relop(context, ValueType::I64), - I64GeS => Validator::validate_relop(context, ValueType::I64), - I64GeU => Validator::validate_relop(context, ValueType::I64), + // Resolve `if_not` to here so when if condition is unsatisfied control flow + // will jump to this label. 
+ context.sink.resolve_label(if_not); - F32Eq => Validator::validate_relop(context, ValueType::F32), - F32Ne => Validator::validate_relop(context, ValueType::F32), - F32Lt => Validator::validate_relop(context, ValueType::F32), - F32Gt => Validator::validate_relop(context, ValueType::F32), - F32Le => Validator::validate_relop(context, ValueType::F32), - F32Ge => Validator::validate_relop(context, ValueType::F32), + // Then, we validate. Validator will pop the if..else block and the push else..end block. + context.pop_label()?; - F64Eq => Validator::validate_relop(context, ValueType::F64), - F64Ne => Validator::validate_relop(context, ValueType::F64), - F64Lt => Validator::validate_relop(context, ValueType::F64), - F64Gt => Validator::validate_relop(context, ValueType::F64), - F64Le => Validator::validate_relop(context, ValueType::F64), - F64Ge => Validator::validate_relop(context, ValueType::F64), + if let BlockType::Value(value_type) = block_type { + context.pop_value(value_type.into())?; + } + context.push_label( + BlockFrameType::IfFalse { + end_label, + }, + block_type, + )?; + }, + End => { + { + let frame_type = context.top_label()?.frame_type; + if let BlockFrameType::IfTrue { if_not, .. } = frame_type { + if context.top_label()?.block_type != BlockType::NoResult { + return Err( + Error( + format!( + "If block without else required to have NoResult block type. 
But it have {:?} type", + context.top_label()?.block_type + ) + ) + ); + } - I32Clz => Validator::validate_unop(context, ValueType::I32), - I32Ctz => Validator::validate_unop(context, ValueType::I32), - I32Popcnt => Validator::validate_unop(context, ValueType::I32), - I32Add => Validator::validate_binop(context, ValueType::I32), - I32Sub => Validator::validate_binop(context, ValueType::I32), - I32Mul => Validator::validate_binop(context, ValueType::I32), - I32DivS => Validator::validate_binop(context, ValueType::I32), - I32DivU => Validator::validate_binop(context, ValueType::I32), - I32RemS => Validator::validate_binop(context, ValueType::I32), - I32RemU => Validator::validate_binop(context, ValueType::I32), - I32And => Validator::validate_binop(context, ValueType::I32), - I32Or => Validator::validate_binop(context, ValueType::I32), - I32Xor => Validator::validate_binop(context, ValueType::I32), - I32Shl => Validator::validate_binop(context, ValueType::I32), - I32ShrS => Validator::validate_binop(context, ValueType::I32), - I32ShrU => Validator::validate_binop(context, ValueType::I32), - I32Rotl => Validator::validate_binop(context, ValueType::I32), - I32Rotr => Validator::validate_binop(context, ValueType::I32), + context.sink.resolve_label(if_not); + } + } - I64Clz => Validator::validate_unop(context, ValueType::I64), - I64Ctz => Validator::validate_unop(context, ValueType::I64), - I64Popcnt => Validator::validate_unop(context, ValueType::I64), - I64Add => Validator::validate_binop(context, ValueType::I64), - I64Sub => Validator::validate_binop(context, ValueType::I64), - I64Mul => Validator::validate_binop(context, ValueType::I64), - I64DivS => Validator::validate_binop(context, ValueType::I64), - I64DivU => Validator::validate_binop(context, ValueType::I64), - I64RemS => Validator::validate_binop(context, ValueType::I64), - I64RemU => Validator::validate_binop(context, ValueType::I64), - I64And => Validator::validate_binop(context, ValueType::I64), - I64Or => 
Validator::validate_binop(context, ValueType::I64), - I64Xor => Validator::validate_binop(context, ValueType::I64), - I64Shl => Validator::validate_binop(context, ValueType::I64), - I64ShrS => Validator::validate_binop(context, ValueType::I64), - I64ShrU => Validator::validate_binop(context, ValueType::I64), - I64Rotl => Validator::validate_binop(context, ValueType::I64), - I64Rotr => Validator::validate_binop(context, ValueType::I64), + { + let frame_type = context.top_label()?.frame_type; - F32Abs => Validator::validate_unop(context, ValueType::F32), - F32Neg => Validator::validate_unop(context, ValueType::F32), - F32Ceil => Validator::validate_unop(context, ValueType::F32), - F32Floor => Validator::validate_unop(context, ValueType::F32), - F32Trunc => Validator::validate_unop(context, ValueType::F32), - F32Nearest => Validator::validate_unop(context, ValueType::F32), - F32Sqrt => Validator::validate_unop(context, ValueType::F32), - F32Add => Validator::validate_binop(context, ValueType::F32), - F32Sub => Validator::validate_binop(context, ValueType::F32), - F32Mul => Validator::validate_binop(context, ValueType::F32), - F32Div => Validator::validate_binop(context, ValueType::F32), - F32Min => Validator::validate_binop(context, ValueType::F32), - F32Max => Validator::validate_binop(context, ValueType::F32), - F32Copysign => Validator::validate_binop(context, ValueType::F32), + // If this end for a non-loop frame then we resolve it's label location to here. 
+ if !frame_type.is_loop() { + let end_label = frame_type.end_label(); + context.sink.resolve_label(end_label); + } + } - F64Abs => Validator::validate_unop(context, ValueType::F64), - F64Neg => Validator::validate_unop(context, ValueType::F64), - F64Ceil => Validator::validate_unop(context, ValueType::F64), - F64Floor => Validator::validate_unop(context, ValueType::F64), - F64Trunc => Validator::validate_unop(context, ValueType::F64), - F64Nearest => Validator::validate_unop(context, ValueType::F64), - F64Sqrt => Validator::validate_unop(context, ValueType::F64), - F64Add => Validator::validate_binop(context, ValueType::F64), - F64Sub => Validator::validate_binop(context, ValueType::F64), - F64Mul => Validator::validate_binop(context, ValueType::F64), - F64Div => Validator::validate_binop(context, ValueType::F64), - F64Min => Validator::validate_binop(context, ValueType::F64), - F64Max => Validator::validate_binop(context, ValueType::F64), - F64Copysign => Validator::validate_binop(context, ValueType::F64), + if context.frame_stack.len() == 1 { + // We are about to close the last frame. Insert + // an explicit return. 
+ let DropKeep { drop, keep } = context.drop_keep_return()?; + context.sink.emit(isa::Instruction::Return { + drop, + keep, + }); + } - I32WrapI64 => Validator::validate_cvtop(context, ValueType::I64, ValueType::I32), - I32TruncSF32 => Validator::validate_cvtop(context, ValueType::F32, ValueType::I32), - I32TruncUF32 => Validator::validate_cvtop(context, ValueType::F32, ValueType::I32), - I32TruncSF64 => Validator::validate_cvtop(context, ValueType::F64, ValueType::I32), - I32TruncUF64 => Validator::validate_cvtop(context, ValueType::F64, ValueType::I32), - I64ExtendSI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::I64), - I64ExtendUI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::I64), - I64TruncSF32 => Validator::validate_cvtop(context, ValueType::F32, ValueType::I64), - I64TruncUF32 => Validator::validate_cvtop(context, ValueType::F32, ValueType::I64), - I64TruncSF64 => Validator::validate_cvtop(context, ValueType::F64, ValueType::I64), - I64TruncUF64 => Validator::validate_cvtop(context, ValueType::F64, ValueType::I64), - F32ConvertSI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::F32), - F32ConvertUI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::F32), - F32ConvertSI64 => Validator::validate_cvtop(context, ValueType::I64, ValueType::F32), - F32ConvertUI64 => Validator::validate_cvtop(context, ValueType::I64, ValueType::F32), - F32DemoteF64 => Validator::validate_cvtop(context, ValueType::F64, ValueType::F32), - F64ConvertSI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::F64), - F64ConvertUI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::F64), - F64ConvertSI64 => Validator::validate_cvtop(context, ValueType::I64, ValueType::F64), - F64ConvertUI64 => Validator::validate_cvtop(context, ValueType::I64, ValueType::F64), - F64PromoteF32 => Validator::validate_cvtop(context, ValueType::F32, ValueType::F64), + context.pop_label()?; + }, + Br(depth) 
=> { + Validator::validate_br(context, depth)?; - I32ReinterpretF32 => Validator::validate_cvtop(context, ValueType::F32, ValueType::I32), - I64ReinterpretF64 => Validator::validate_cvtop(context, ValueType::F64, ValueType::I64), - F32ReinterpretI32 => Validator::validate_cvtop(context, ValueType::I32, ValueType::F32), - F64ReinterpretI64 => Validator::validate_cvtop(context, ValueType::I64, ValueType::F64), + let target = context.require_target(depth)?; + context.sink.emit_br(target); + + return Ok(InstructionOutcome::Unreachable); + }, + BrIf(depth) => { + Validator::validate_br_if(context, depth)?; + + let target = context.require_target(depth)?; + context.sink.emit_br_nez(target); + }, + BrTable(ref table, default) => { + Validator::validate_br_table(context, table, default)?; + + let mut targets = Vec::new(); + for depth in table.iter() { + let target = context.require_target(*depth)?; + targets.push(target); + } + let default_target = context.require_target(default)?; + context.sink.emit_br_table(&targets, default_target); + + return Ok(InstructionOutcome::Unreachable); + }, + Return => { + let DropKeep { drop, keep } = context.drop_keep_return()?; + context.sink.emit(isa::Instruction::Return { + drop, + keep, + }); + + if let BlockType::Value(value_type) = context.return_type()? 
{ + context.tee_value(value_type.into())?; + } + + return Ok(InstructionOutcome::Unreachable); + }, + + Call(index) => { + Validator::validate_call(context, index)?; + context.sink.emit(isa::Instruction::Call(index)); + }, + CallIndirect(index, _reserved) => { + Validator::validate_call_indirect(context, index)?; + context.sink.emit(isa::Instruction::CallIndirect(index)); + }, + + Drop => { + Validator::validate_drop(context)?; + context.sink.emit(isa::Instruction::Drop); + }, + Select => { + Validator::validate_select(context)?; + context.sink.emit(isa::Instruction::Select); + }, + + GetLocal(index) => { + // We need to calculate relative depth before validation since + // it will change value stack size. + let depth = context.relative_local_depth(index)?; + Validator::validate_get_local(context, index)?; + context.sink.emit( + isa::Instruction::GetLocal(depth), + ); + }, + SetLocal(index) => { + Validator::validate_set_local(context, index)?; + let depth = context.relative_local_depth(index)?; + context.sink.emit( + isa::Instruction::SetLocal(depth), + ); + }, + TeeLocal(index) => { + // We need to calculate relative depth before validation since + // it will change value stack size. 
+ let depth = context.relative_local_depth(index)?; + Validator::validate_tee_local(context, index)?; + context.sink.emit( + isa::Instruction::TeeLocal(depth), + ); + }, + GetGlobal(index) => { + Validator::validate_get_global(context, index)?; + context.sink.emit(isa::Instruction::GetGlobal(index)); + + }, + SetGlobal(index) => { + Validator::validate_set_global(context, index)?; + context.sink.emit(isa::Instruction::SetGlobal(index)); + + }, + + I32Load(align, offset) => { + Validator::validate_load(context, align, 4, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Load(offset)); + + }, + I64Load(align, offset) => { + Validator::validate_load(context, align, 8, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Load(offset)); + + }, + F32Load(align, offset) => { + Validator::validate_load(context, align, 4, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Load(offset)); + + }, + F64Load(align, offset) => { + Validator::validate_load(context, align, 8, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Load(offset)); + + }, + I32Load8S(align, offset) => { + Validator::validate_load(context, align, 1, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Load8S(offset)); + + }, + I32Load8U(align, offset) => { + Validator::validate_load(context, align, 1, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Load8U(offset)); + + }, + I32Load16S(align, offset) => { + Validator::validate_load(context, align, 2, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Load16S(offset)); + + }, + I32Load16U(align, offset) => { + Validator::validate_load(context, align, 2, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Load16U(offset)); + + }, + I64Load8S(align, offset) => { + Validator::validate_load(context, align, 1, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Load8S(offset)); + + }, + I64Load8U(align, offset) => { + Validator::validate_load(context, align, 1, ValueType::I64)?; + 
context.sink.emit(isa::Instruction::I64Load8U(offset)); + + }, + I64Load16S(align, offset) => { + Validator::validate_load(context, align, 2, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Load16S(offset)); + + }, + I64Load16U(align, offset) => { + Validator::validate_load(context, align, 2, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Load16U(offset)); + + }, + I64Load32S(align, offset) => { + Validator::validate_load(context, align, 4, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Load32S(offset)); + + }, + I64Load32U(align, offset) => { + Validator::validate_load(context, align, 4, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Load32U(offset)); + + }, + + I32Store(align, offset) => { + Validator::validate_store(context, align, 4, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Store(offset)); + + }, + I64Store(align, offset) => { + Validator::validate_store(context, align, 8, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Store(offset)); + + }, + F32Store(align, offset) => { + Validator::validate_store(context, align, 4, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Store(offset)); + + }, + F64Store(align, offset) => { + Validator::validate_store(context, align, 8, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Store(offset)); + + }, + I32Store8(align, offset) => { + Validator::validate_store(context, align, 1, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Store8(offset)); + + }, + I32Store16(align, offset) => { + Validator::validate_store(context, align, 2, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Store16(offset)); + + }, + I64Store8(align, offset) => { + Validator::validate_store(context, align, 1, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Store8(offset)); + + }, + I64Store16(align, offset) => { + Validator::validate_store(context, align, 2, ValueType::I64)?; + 
context.sink.emit(isa::Instruction::I64Store16(offset)); + + }, + I64Store32(align, offset) => { + Validator::validate_store(context, align, 4, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Store32(offset)); + + }, + + CurrentMemory(_) => { + Validator::validate_current_memory(context)?; + context.sink.emit(isa::Instruction::CurrentMemory); + + }, + GrowMemory(_) => { + Validator::validate_grow_memory(context)?; + context.sink.emit(isa::Instruction::GrowMemory); + + }, + + I32Const(v) => { + Validator::validate_const(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Const(v)); + + }, + I64Const(v) => { + Validator::validate_const(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Const(v)); + + }, + F32Const(v) => { + Validator::validate_const(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Const(v)); + + }, + F64Const(v) => { + Validator::validate_const(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Const(v)); + + }, + + I32Eqz => { + Validator::validate_testop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Eqz); + + }, + I32Eq => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Eq); + + }, + I32Ne => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Ne); + + }, + I32LtS => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32LtS); + + }, + I32LtU => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32LtU); + + }, + I32GtS => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32GtS); + + }, + I32GtU => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32GtU); + + }, + I32LeS => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32LeS); + 
+ }, + I32LeU => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32LeU); + + }, + I32GeS => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32GeS); + + }, + I32GeU => { + Validator::validate_relop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32GeU); + + }, + + I64Eqz => { + Validator::validate_testop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Eqz); + + }, + I64Eq => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Eq); + + }, + I64Ne => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Ne); + + }, + I64LtS => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64LtS); + + }, + I64LtU => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64LtU); + + }, + I64GtS => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64GtS); + + }, + I64GtU => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64GtU); + + }, + I64LeS => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64LeS); + + }, + I64LeU => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64LeU); + + }, + I64GeS => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64GeS); + + }, + I64GeU => { + Validator::validate_relop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64GeU); + + }, + + F32Eq => { + Validator::validate_relop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Eq); + + }, + F32Ne => { + Validator::validate_relop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Ne); + + }, + F32Lt => { + 
Validator::validate_relop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Lt); + + }, + F32Gt => { + Validator::validate_relop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Gt); + + }, + F32Le => { + Validator::validate_relop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Le); + + }, + F32Ge => { + Validator::validate_relop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Ge); + + }, + + F64Eq => { + Validator::validate_relop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Eq); + + }, + F64Ne => { + Validator::validate_relop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Ne); + + }, + F64Lt => { + Validator::validate_relop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Lt); + + }, + F64Gt => { + Validator::validate_relop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Gt); + + }, + F64Le => { + Validator::validate_relop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Le); + + }, + F64Ge => { + Validator::validate_relop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Ge); + + }, + + I32Clz => { + Validator::validate_unop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Clz); + + }, + I32Ctz => { + Validator::validate_unop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Ctz); + + }, + I32Popcnt => { + Validator::validate_unop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Popcnt); + + }, + I32Add => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Add); + + }, + I32Sub => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Sub); + + }, + I32Mul => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Mul); + + }, + I32DivS => { + Validator::validate_binop(context, 
ValueType::I32)?; + context.sink.emit(isa::Instruction::I32DivS); + + }, + I32DivU => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32DivU); + + }, + I32RemS => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32RemS); + + }, + I32RemU => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32RemU); + + }, + I32And => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32And); + + }, + I32Or => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Or); + + }, + I32Xor => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Xor); + + }, + I32Shl => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Shl); + + }, + I32ShrS => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32ShrS); + + }, + I32ShrU => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32ShrU); + + }, + I32Rotl => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Rotl); + + }, + I32Rotr => { + Validator::validate_binop(context, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32Rotr); + + }, + + I64Clz => { + Validator::validate_unop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Clz); + + }, + I64Ctz => { + Validator::validate_unop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Ctz); + + }, + I64Popcnt => { + Validator::validate_unop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Popcnt); + + }, + I64Add => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Add); + + }, + I64Sub => { + Validator::validate_binop(context, 
ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Sub); + + }, + I64Mul => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Mul); + + }, + I64DivS => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64DivS); + + }, + I64DivU => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64DivU); + + }, + I64RemS => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64RemS); + + }, + I64RemU => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64RemU); + + }, + I64And => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64And); + + }, + I64Or => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Or); + + }, + I64Xor => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Xor); + + }, + I64Shl => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Shl); + + }, + I64ShrS => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64ShrS); + + }, + I64ShrU => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64ShrU); + + }, + I64Rotl => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Rotl); + + }, + I64Rotr => { + Validator::validate_binop(context, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64Rotr); + + }, + + F32Abs => { + Validator::validate_unop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Abs); + + }, + F32Neg => { + Validator::validate_unop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Neg); + + }, + F32Ceil => { + Validator::validate_unop(context, ValueType::F32)?; 
+ context.sink.emit(isa::Instruction::F32Ceil); + + }, + F32Floor => { + Validator::validate_unop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Floor); + + }, + F32Trunc => { + Validator::validate_unop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Trunc); + + }, + F32Nearest => { + Validator::validate_unop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Nearest); + + }, + F32Sqrt => { + Validator::validate_unop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Sqrt); + + }, + F32Add => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Add); + + }, + F32Sub => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Sub); + + }, + F32Mul => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Mul); + + }, + F32Div => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Div); + + }, + F32Min => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Min); + + }, + F32Max => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Max); + + }, + F32Copysign => { + Validator::validate_binop(context, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32Copysign); + + }, + + F64Abs => { + Validator::validate_unop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Abs); + + }, + F64Neg => { + Validator::validate_unop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Neg); + + }, + F64Ceil => { + Validator::validate_unop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Ceil); + + }, + F64Floor => { + Validator::validate_unop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Floor); + + }, + F64Trunc => { + Validator::validate_unop(context, ValueType::F64)?; + 
context.sink.emit(isa::Instruction::F64Trunc); + + }, + F64Nearest => { + Validator::validate_unop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Nearest); + + }, + F64Sqrt => { + Validator::validate_unop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Sqrt); + + }, + F64Add => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Add); + + }, + F64Sub => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Sub); + + }, + F64Mul => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Mul); + + }, + F64Div => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Div); + + }, + F64Min => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Min); + + }, + F64Max => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Max); + + }, + F64Copysign => { + Validator::validate_binop(context, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64Copysign); + + }, + + I32WrapI64 => { + Validator::validate_cvtop(context, ValueType::I64, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32WrapI64); + + }, + I32TruncSF32 => { + Validator::validate_cvtop(context, ValueType::F32, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32TruncSF32); + + }, + I32TruncUF32 => { + Validator::validate_cvtop(context, ValueType::F32, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32TruncUF32); + + }, + I32TruncSF64 => { + Validator::validate_cvtop(context, ValueType::F64, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32TruncSF64); + + }, + I32TruncUF64 => { + Validator::validate_cvtop(context, ValueType::F64, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32TruncUF64); + + }, + I64ExtendSI32 => { + Validator::validate_cvtop(context, 
ValueType::I32, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64ExtendSI32); + + }, + I64ExtendUI32 => { + Validator::validate_cvtop(context, ValueType::I32, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64ExtendUI32); + + }, + I64TruncSF32 => { + Validator::validate_cvtop(context, ValueType::F32, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64TruncSF32); + + }, + I64TruncUF32 => { + Validator::validate_cvtop(context, ValueType::F32, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64TruncUF32); + + }, + I64TruncSF64 => { + Validator::validate_cvtop(context, ValueType::F64, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64TruncSF64); + + }, + I64TruncUF64 => { + Validator::validate_cvtop(context, ValueType::F64, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64TruncUF64); + + }, + F32ConvertSI32 => { + Validator::validate_cvtop(context, ValueType::I32, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32ConvertSI32); + + }, + F32ConvertUI32 => { + Validator::validate_cvtop(context, ValueType::I32, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32ConvertUI32); + + }, + F32ConvertSI64 => { + Validator::validate_cvtop(context, ValueType::I64, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32ConvertSI64); + + }, + F32ConvertUI64 => { + Validator::validate_cvtop(context, ValueType::I64, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32ConvertUI64); + + }, + F32DemoteF64 => { + Validator::validate_cvtop(context, ValueType::F64, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32DemoteF64); + + }, + F64ConvertSI32 => { + Validator::validate_cvtop(context, ValueType::I32, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64ConvertSI32); + + }, + F64ConvertUI32 => { + Validator::validate_cvtop(context, ValueType::I32, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64ConvertUI32); + + }, + F64ConvertSI64 => { + Validator::validate_cvtop(context, 
ValueType::I64, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64ConvertSI64); + + }, + F64ConvertUI64 => { + Validator::validate_cvtop(context, ValueType::I64, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64ConvertUI64); + + }, + F64PromoteF32 => { + Validator::validate_cvtop(context, ValueType::F32, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64PromoteF32); + + }, + + I32ReinterpretF32 => { + Validator::validate_cvtop(context, ValueType::F32, ValueType::I32)?; + context.sink.emit(isa::Instruction::I32ReinterpretF32); + + }, + I64ReinterpretF64 => { + Validator::validate_cvtop(context, ValueType::F64, ValueType::I64)?; + context.sink.emit(isa::Instruction::I64ReinterpretF64); + + }, + F32ReinterpretI32 => { + Validator::validate_cvtop(context, ValueType::I32, ValueType::F32)?; + context.sink.emit(isa::Instruction::F32ReinterpretI32); + + }, + F64ReinterpretI64 => { + Validator::validate_cvtop(context, ValueType::I64, ValueType::F64)?; + context.sink.emit(isa::Instruction::F64ReinterpretI64); + + }, } + + Ok(InstructionOutcome::ValidateNextInstruction) } fn validate_const(context: &mut FunctionValidationContext, value_type: ValueType) -> Result { @@ -418,54 +1296,12 @@ impl Validator { Ok(InstructionOutcome::ValidateNextInstruction) } - fn validate_block(context: &mut FunctionValidationContext, block_type: BlockType) -> Result { - context.push_label(BlockFrameType::Block, block_type).map(|_| InstructionOutcome::ValidateNextInstruction) - } - - fn validate_loop(context: &mut FunctionValidationContext, block_type: BlockType) -> Result { - context.push_label(BlockFrameType::Loop, block_type).map(|_| InstructionOutcome::ValidateNextInstruction) - } - - fn validate_if(context: &mut FunctionValidationContext, block_type: BlockType) -> Result { - context.pop_value(ValueType::I32.into())?; - context.push_label(BlockFrameType::IfTrue, block_type).map(|_| InstructionOutcome::ValidateNextInstruction) - } - - fn validate_else(context: &mut 
FunctionValidationContext) -> Result { - let block_type = { - let top_frame = context.top_label()?; - if top_frame.frame_type != BlockFrameType::IfTrue { - return Err(Error("Misplaced else instruction".into())); - } - top_frame.block_type - }; - context.pop_label()?; - - if let BlockType::Value(value_type) = block_type { - context.pop_value(value_type.into())?; - } - context.push_label(BlockFrameType::IfFalse, block_type).map(|_| InstructionOutcome::ValidateNextInstruction) - } - - fn validate_end(context: &mut FunctionValidationContext) -> Result { - { - let top_frame = context.top_label()?; - if top_frame.frame_type == BlockFrameType::IfTrue { - if top_frame.block_type != BlockType::NoResult { - return Err(Error(format!("If block without else required to have NoResult block type. But it have {:?} type", top_frame.block_type))); - } - } - } - - context.pop_label().map(|_| InstructionOutcome::ValidateNextInstruction) - } - fn validate_br(context: &mut FunctionValidationContext, idx: u32) -> Result { let (frame_type, frame_block_type) = { let frame = context.require_label(idx)?; (frame.frame_type, frame.block_type) }; - if frame_type != BlockFrameType::Loop { + if !frame_type.is_loop() { if let BlockType::Value(value_type) = frame_block_type { context.tee_value(value_type.into())?; } @@ -480,7 +1316,7 @@ impl Validator { let frame = context.require_label(idx)?; (frame.frame_type, frame.block_type) }; - if frame_type != BlockFrameType::Loop { + if !frame_type.is_loop() { if let BlockType::Value(value_type) = frame_block_type { context.tee_value(value_type.into())?; } @@ -491,7 +1327,7 @@ impl Validator { fn validate_br_table(context: &mut FunctionValidationContext, table: &[u32], default: u32) -> Result { let required_block_type: BlockType = { let default_block = context.require_label(default)?; - let required_block_type = if default_block.frame_type != BlockFrameType::Loop { + let required_block_type = if !default_block.frame_type.is_loop() { 
default_block.block_type } else { BlockType::NoResult @@ -499,7 +1335,7 @@ impl Validator { for label in table { let label_block = context.require_label(*label)?; - let label_block_type = if label_block.frame_type != BlockFrameType::Loop { + let label_block_type = if !label_block.frame_type.is_loop() { label_block.block_type } else { BlockType::NoResult @@ -527,13 +1363,6 @@ impl Validator { Ok(InstructionOutcome::Unreachable) } - fn validate_return(context: &mut FunctionValidationContext) -> Result { - if let BlockType::Value(value_type) = context.return_type()? { - context.tee_value(value_type.into())?; - } - Ok(InstructionOutcome::Unreachable) - } - fn validate_call(context: &mut FunctionValidationContext, idx: u32) -> Result { let (argument_types, return_type) = context.module.require_function(idx)?; for argument_type in argument_types.iter().rev() { @@ -582,10 +1411,29 @@ impl Validator { } } +/// Function validation context. +struct FunctionValidationContext<'a> { + /// Wasm module + module: &'a ModuleContext, + /// Current instruction position. + position: usize, + /// Local variables. + locals: Locals<'a>, + /// Value stack. + value_stack: StackWithLimit, + /// Frame stack. + frame_stack: StackWithLimit, + /// Function return type. 
+ return_type: BlockType, + + // Sink that collects the instructions emitted during validation. + sink: Sink, +} + impl<'a> FunctionValidationContext<'a> { fn new( module: &'a ModuleContext, - locals: &'a [ValueType], + locals: Locals<'a>, value_stack_limit: usize, frame_stack_limit: usize, return_type: BlockType, @@ -596,8 +1444,8 @@ impl<'a> FunctionValidationContext<'a> { locals: locals, value_stack: StackWithLimit::with_limit(value_stack_limit), frame_stack: StackWithLimit::with_limit(frame_stack_limit), - return_type: Some(return_type), - labels: HashMap::new(), + return_type: return_type, + sink: Sink::new(), } } @@ -660,14 +1508,12 @@ impl<'a> FunctionValidationContext<'a> { frame_type: frame_type, block_type: block_type, begin_position: self.position, - branch_position: self.position, - end_position: self.position, value_stack_len: self.value_stack.len(), polymorphic_stack: false, })?) } - fn pop_label(&mut self) -> Result { + fn pop_label(&mut self) -> Result<(), Error> { // Don't pop frame yet. This is essential since we still might pop values from the value stack // and this in turn requires current frame to check whether or not we've reached // unreachable.
@@ -688,14 +1534,11 @@ impl<'a> FunctionValidationContext<'a> { ))); } - if !self.frame_stack.is_empty() { - self.labels.insert(frame.begin_position, self.position); - } if let BlockType::Value(value_type) = frame.block_type { self.push_value(value_type.into())?; } - Ok(InstructionOutcome::ValidateNextInstruction) + Ok(()) } fn require_label(&self, idx: u32) -> Result<&BlockFrame, Error> { @@ -703,18 +1546,100 @@ impl<'a> FunctionValidationContext<'a> { } fn return_type(&self) -> Result { - self.return_type.ok_or(Error("Trying to return from expression".into())) + Ok(self.return_type) } fn require_local(&self, idx: u32) -> Result { - self.locals.get(idx as usize) - .cloned() - .map(Into::into) - .ok_or(Error(format!("Trying to access local with index {} when there are only {} locals", idx, self.locals.len()))) + Ok(self.locals.type_of_local(idx).map(StackValueType::from)?) } - fn into_labels(self) -> HashMap { - self.labels + fn require_target(&self, depth: u32) -> Result { + let is_stack_polymorphic = self.top_label()?.polymorphic_stack; + let frame = self.require_label(depth)?; + + let keep: u8 = match (frame.frame_type, frame.block_type) { + (BlockFrameType::Loop { .. }, _) => 0, + (_, BlockType::NoResult) => 0, + (_, BlockType::Value(_)) => 1, + }; + + let value_stack_height = self.value_stack.len(); + let drop = if is_stack_polymorphic { 0 } else { + // TODO: Remove this. + // println!("value_stack_height = {}", value_stack_height); + // println!("frame.value_stack_len = {}", frame.value_stack_len); + // println!("keep = {}", keep); + + if value_stack_height < frame.value_stack_len { + // TODO: Better error message. + return Err( + Error( + format!( + "Stack underflow detected: value stack height ({}) is lower than minimum stack len ({})", + value_stack_height, + frame.value_stack_len, + ) + ) + ); + } + if (value_stack_height as u32 - frame.value_stack_len as u32) < keep as u32 { + // TODO: Better error message. 
+ return Err( + Error( + format!( + "Stack underflow detected: asked to keep {} values, but there are only {}", + keep, + (value_stack_height as u32 - frame.value_stack_len as u32), + ) + ) + ); + } + + (value_stack_height as u32 - frame.value_stack_len as u32) - keep as u32 + }; + + Ok(Target { + label: frame.frame_type.br_destination(), + drop_keep: DropKeep { + drop, + keep, + }, + }) + } + + fn drop_keep_return(&self) -> Result { + assert!( + !self.frame_stack.is_empty(), + "drop_keep_return can't be called with the frame stack empty" + ); + + let deepest = (self.frame_stack.len() - 1) as u32; + let mut drop_keep = self.require_target(deepest)?.drop_keep; + + // Drop all local variables and parameters upon exit. + drop_keep.drop += self.locals.count()?; + + Ok(drop_keep) + } + + fn relative_local_depth(&mut self, idx: u32) -> Result { + // TODO: Comment stack layout + let value_stack_height = self.value_stack.len() as u32; + let locals_and_params_count = self.locals.count()?; + + let depth = value_stack_height + .checked_add(locals_and_params_count) + .and_then(|x| x.checked_sub(idx)) + .ok_or_else(|| + Error(String::from("Locals range not in 32-bit range")) + )?; + Ok(depth) + } + + fn into_code(self) -> isa::Instructions { + isa::Instructions { + code: self.sink.into_inner(), + } + } } @@ -765,3 +1690,188 @@ impl PartialEq for ValueType { other == self } } + +#[derive(Clone)] +struct DropKeep { + drop: u32, + keep: u8, +} + +#[derive(Clone)] +struct Target { + label: LabelId, + drop_keep: DropKeep, +} + +enum Reloc { + Br { + pc: u32, + }, + BrTable { + pc: u32, + idx: usize, + }, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +struct LabelId(usize); +enum Label { + Resolved(u32), + NotResolved, +} + +struct Sink { + ins: Vec, + labels: Vec