wasmi/src/runner.rs

use std::mem;
use std::ops;
use std::{u32, usize};
use std::fmt;
use std::iter::repeat;
use parity_wasm::elements::Local;
use {Error, Trap, TrapKind, Signature};
use module::ModuleRef;
use func::{FuncRef, FuncInstance, FuncInstanceInternal};
use value::{
RuntimeValue, FromRuntimeValue, WrapInto, TryTruncateInto, ExtendInto,
ArithmeticOps, Integer, Float, LittleEndianConvert, TransmuteInto,
};
use host::Externals;
use common::{DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX};
use common::stack::StackWithLimit;
use memory_units::Pages;
use nan_preserving_float::{F32, F64};
use isa;
/// Maximum number of entries in value stack.
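/// Sized so that the preallocated stack buffer occupies roughly 512 KiB.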
pub const DEFAULT_VALUE_STACK_LIMIT: usize = (512 * 1024) / ::std::mem::size_of::<RuntimeValue>();
// TODO: Make these parameters changeable.
pub const DEFAULT_CALL_STACK_LIMIT: usize = 16 * 1024;
/// Interpreter action to execute after executing instruction.
pub enum InstructionOutcome {
/// Continue with next instruction.
RunNextInstruction,
/// Branch to an instruction at the given position.
Branch(isa::Target),
/// Execute function call.
ExecuteCall(FuncRef),
/// Return from current function block.
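/// The `u32` is the number of values to drop from the value stack and the
/// `u8` is how many of the topmost values to keep (0 or 1) while doing so.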
Return(u32, u8),
}
/// Function run result.
enum RunResult {
/// Function has returned.
Return,
/// Function is calling other function.
NestedCall(FuncRef),
}
/// Function interpreter.
pub struct Interpreter<'a, E: Externals + 'a> {
externals: &'a mut E,
value_stack: ValueStack,
}
impl<'a, E: Externals> Interpreter<'a, E> {
pub fn new(externals: &'a mut E) -> Interpreter<'a, E> {
let value_stack = ValueStack::with_limit(DEFAULT_VALUE_STACK_LIMIT);
Interpreter {
externals,
value_stack,
}
}
pub fn start_execution(&mut self, func: &FuncRef, args: &[RuntimeValue]) -> Result<Option<RuntimeValue>, Trap> {
for arg in args {
self.value_stack
.push(*arg)
.map_err(
// There is not enough space for pushing initial arguments.
// Weird, but bail out anyway.
|_| Trap::from(TrapKind::StackOverflow)
)?;
}
let initial_frame = FunctionContext::new(func.clone());
let mut call_stack = Vec::new();
call_stack.push(initial_frame);
self.run_interpreter_loop(&mut call_stack)?;
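// If the function declares a return type, its value is left on top of the value stack.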
Ok(func.signature().return_type().map(|_vt| {
let return_value = self.value_stack
.pop();
return_value
}))
}
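// Drives execution by repeatedly popping the top frame of `call_stack`: internal
// calls push the caller back followed by the callee's frame, host functions are
// invoked directly, and a return with an empty call stack ends execution.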
fn run_interpreter_loop(&mut self, call_stack: &mut Vec<FunctionContext>) -> Result<(), Trap> {
loop {
let mut function_context = call_stack
.pop()
.expect("on loop entry - not empty; on loop continue - checking for emptiness; qed");
let function_ref = function_context.function.clone();
let function_body = function_ref
.body()
.expect(
"Host functions checked in function_return below; Internal functions always have a body; qed"
);
if !function_context.is_initialized() {
// Initialize stack frame for the function call.
function_context.initialize(&function_body.locals, &mut self.value_stack)?;
}
let function_return =
self.do_run_function(
&mut function_context,
&function_body.code.code,
).map_err(Trap::new)?;
match function_return {
RunResult::Return => {
if call_stack.is_empty() {
// This was the last frame in the call stack. This means we
// are done executing.
return Ok(());
}
},
RunResult::NestedCall(nested_func) => {
if call_stack.len() + 1 >= DEFAULT_CALL_STACK_LIMIT {
return Err(TrapKind::StackOverflow.into());
}
match *nested_func.as_internal() {
FuncInstanceInternal::Internal { .. } => {
let nested_context = function_context.nested(nested_func.clone()).map_err(Trap::new)?;
call_stack.push(function_context);
call_stack.push(nested_context);
},
FuncInstanceInternal::Host { ref signature, .. } => {
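// Pop the host function's arguments off the value stack according to its signature.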
let args = prepare_function_args(signature, &mut self.value_stack);
let return_val = FuncInstance::invoke(&nested_func, &args, self.externals)?;
// Check if `return_val` matches the signature.
let value_ty = return_val.clone().map(|val| val.value_type());
let expected_ty = nested_func.signature().return_type();
if value_ty != expected_ty {
return Err(TrapKind::UnexpectedSignature.into());
}
if let Some(return_val) = return_val {
self.value_stack.push(return_val).map_err(Trap::new)?;
}
call_stack.push(function_context);
}
}
},
}
}
}
fn do_run_function(&mut self, function_context: &mut FunctionContext, instructions: &[isa::Instruction]) -> Result<RunResult, TrapKind> {
loop {
let instruction = &instructions[function_context.position];
match self.run_instruction(function_context, instruction)? {
InstructionOutcome::RunNextInstruction => function_context.position += 1,
InstructionOutcome::Branch(target) => {
function_context.position = target.dst_pc as usize;
self.value_stack.drop_keep(target.drop, target.keep);
},
InstructionOutcome::ExecuteCall(func_ref) => {
function_context.position += 1;
return Ok(RunResult::NestedCall(func_ref));
},
InstructionOutcome::Return(drop, keep) => {
self.value_stack.drop_keep(drop, keep);
break;
},
}
}
Ok(RunResult::Return)
}
fn run_instruction(&mut self, context: &mut FunctionContext, instruction: &isa::Instruction) -> Result<InstructionOutcome, TrapKind> {
match instruction {
&isa::Instruction::Unreachable => self.run_unreachable(context),
&isa::Instruction::Br(ref target) => self.run_br(context, target.clone()),
&isa::Instruction::BrIfEqz(ref target) => self.run_br_eqz(target.clone()),
&isa::Instruction::BrIfNez(ref target) => self.run_br_nez(target.clone()),
&isa::Instruction::BrTable(ref targets) => self.run_br_table(targets),
&isa::Instruction::Return { drop, keep } => self.run_return(drop, keep),
&isa::Instruction::Call(index) => self.run_call(context, index),
&isa::Instruction::CallIndirect(index) => self.run_call_indirect(context, index),
&isa::Instruction::Drop => self.run_drop(),
&isa::Instruction::Select => self.run_select(),
&isa::Instruction::GetLocal(depth) => self.run_get_local(depth),
&isa::Instruction::SetLocal(depth) => self.run_set_local(depth),
&isa::Instruction::TeeLocal(depth) => self.run_tee_local(depth),
&isa::Instruction::GetGlobal(index) => self.run_get_global(context, index),
&isa::Instruction::SetGlobal(index) => self.run_set_global(context, index),
&isa::Instruction::I32Load(offset) => self.run_load::<i32>(context, offset),
&isa::Instruction::I64Load(offset) => self.run_load::<i64>(context, offset),
&isa::Instruction::F32Load(offset) => self.run_load::<F32>(context, offset),
&isa::Instruction::F64Load(offset) => self.run_load::<F64>(context, offset),
&isa::Instruction::I32Load8S(offset) => self.run_load_extend::<i8, i32>(context, offset),
&isa::Instruction::I32Load8U(offset) => self.run_load_extend::<u8, i32>(context, offset),
&isa::Instruction::I32Load16S(offset) => self.run_load_extend::<i16, i32>(context, offset),
&isa::Instruction::I32Load16U(offset) => self.run_load_extend::<u16, i32>(context, offset),
&isa::Instruction::I64Load8S(offset) => self.run_load_extend::<i8, i64>(context, offset),
&isa::Instruction::I64Load8U(offset) => self.run_load_extend::<u8, i64>(context, offset),
&isa::Instruction::I64Load16S(offset) => self.run_load_extend::<i16, i64>(context, offset),
&isa::Instruction::I64Load16U(offset) => self.run_load_extend::<u16, i64>(context, offset),
&isa::Instruction::I64Load32S(offset) => self.run_load_extend::<i32, i64>(context, offset),
&isa::Instruction::I64Load32U(offset) => self.run_load_extend::<u32, i64>(context, offset),
&isa::Instruction::I32Store(offset) => self.run_store::<i32>(context, offset),
&isa::Instruction::I64Store(offset) => self.run_store::<i64>(context, offset),
&isa::Instruction::F32Store(offset) => self.run_store::<F32>(context, offset),
&isa::Instruction::F64Store(offset) => self.run_store::<F64>(context, offset),
&isa::Instruction::I32Store8(offset) => self.run_store_wrap::<i32, i8>(context, offset),
&isa::Instruction::I32Store16(offset) => self.run_store_wrap::<i32, i16>(context, offset),
&isa::Instruction::I64Store8(offset) => self.run_store_wrap::<i64, i8>(context, offset),
&isa::Instruction::I64Store16(offset) => self.run_store_wrap::<i64, i16>(context, offset),
&isa::Instruction::I64Store32(offset) => self.run_store_wrap::<i64, i32>(context, offset),
&isa::Instruction::CurrentMemory => self.run_current_memory(context),
&isa::Instruction::GrowMemory => self.run_grow_memory(context),
&isa::Instruction::I32Const(val) => self.run_const(val.into()),
&isa::Instruction::I64Const(val) => self.run_const(val.into()),
&isa::Instruction::F32Const(val) => self.run_const(RuntimeValue::decode_f32(val)),
&isa::Instruction::F64Const(val) => self.run_const(RuntimeValue::decode_f64(val)),
&isa::Instruction::I32Eqz => self.run_eqz::<i32>(),
&isa::Instruction::I32Eq => self.run_eq::<i32>(),
&isa::Instruction::I32Ne => self.run_ne::<i32>(),
&isa::Instruction::I32LtS => self.run_lt::<i32>(),
&isa::Instruction::I32LtU => self.run_lt::<u32>(),
&isa::Instruction::I32GtS => self.run_gt::<i32>(),
&isa::Instruction::I32GtU => self.run_gt::<u32>(),
&isa::Instruction::I32LeS => self.run_lte::<i32>(),
&isa::Instruction::I32LeU => self.run_lte::<u32>(),
&isa::Instruction::I32GeS => self.run_gte::<i32>(),
&isa::Instruction::I32GeU => self.run_gte::<u32>(),
&isa::Instruction::I64Eqz => self.run_eqz::<i64>(),
&isa::Instruction::I64Eq => self.run_eq::<i64>(),
&isa::Instruction::I64Ne => self.run_ne::<i64>(),
&isa::Instruction::I64LtS => self.run_lt::<i64>(),
&isa::Instruction::I64LtU => self.run_lt::<u64>(),
&isa::Instruction::I64GtS => self.run_gt::<i64>(),
&isa::Instruction::I64GtU => self.run_gt::<u64>(),
&isa::Instruction::I64LeS => self.run_lte::<i64>(),
&isa::Instruction::I64LeU => self.run_lte::<u64>(),
&isa::Instruction::I64GeS => self.run_gte::<i64>(),
&isa::Instruction::I64GeU => self.run_gte::<u64>(),
&isa::Instruction::F32Eq => self.run_eq::<F32>(),
&isa::Instruction::F32Ne => self.run_ne::<F32>(),
&isa::Instruction::F32Lt => self.run_lt::<F32>(),
&isa::Instruction::F32Gt => self.run_gt::<F32>(),
&isa::Instruction::F32Le => self.run_lte::<F32>(),
&isa::Instruction::F32Ge => self.run_gte::<F32>(),
&isa::Instruction::F64Eq => self.run_eq::<F64>(),
&isa::Instruction::F64Ne => self.run_ne::<F64>(),
&isa::Instruction::F64Lt => self.run_lt::<F64>(),
&isa::Instruction::F64Gt => self.run_gt::<F64>(),
&isa::Instruction::F64Le => self.run_lte::<F64>(),
&isa::Instruction::F64Ge => self.run_gte::<F64>(),
&isa::Instruction::I32Clz => self.run_clz::<i32>(),
&isa::Instruction::I32Ctz => self.run_ctz::<i32>(),
&isa::Instruction::I32Popcnt => self.run_popcnt::<i32>(),
&isa::Instruction::I32Add => self.run_add::<i32>(),
&isa::Instruction::I32Sub => self.run_sub::<i32>(),
&isa::Instruction::I32Mul => self.run_mul::<i32>(),
&isa::Instruction::I32DivS => self.run_div::<i32, i32>(),
&isa::Instruction::I32DivU => self.run_div::<i32, u32>(),
&isa::Instruction::I32RemS => self.run_rem::<i32, i32>(),
&isa::Instruction::I32RemU => self.run_rem::<i32, u32>(),
&isa::Instruction::I32And => self.run_and::<i32>(),
&isa::Instruction::I32Or => self.run_or::<i32>(),
&isa::Instruction::I32Xor => self.run_xor::<i32>(),
&isa::Instruction::I32Shl => self.run_shl::<i32>(0x1F),
&isa::Instruction::I32ShrS => self.run_shr::<i32, i32>(0x1F),
&isa::Instruction::I32ShrU => self.run_shr::<i32, u32>(0x1F),
&isa::Instruction::I32Rotl => self.run_rotl::<i32>(),
&isa::Instruction::I32Rotr => self.run_rotr::<i32>(),
&isa::Instruction::I64Clz => self.run_clz::<i64>(),
&isa::Instruction::I64Ctz => self.run_ctz::<i64>(),
&isa::Instruction::I64Popcnt => self.run_popcnt::<i64>(),
&isa::Instruction::I64Add => self.run_add::<i64>(),
&isa::Instruction::I64Sub => self.run_sub::<i64>(),
&isa::Instruction::I64Mul => self.run_mul::<i64>(),
&isa::Instruction::I64DivS => self.run_div::<i64, i64>(),
&isa::Instruction::I64DivU => self.run_div::<i64, u64>(),
&isa::Instruction::I64RemS => self.run_rem::<i64, i64>(),
&isa::Instruction::I64RemU => self.run_rem::<i64, u64>(),
&isa::Instruction::I64And => self.run_and::<i64>(),
&isa::Instruction::I64Or => self.run_or::<i64>(),
&isa::Instruction::I64Xor => self.run_xor::<i64>(),
&isa::Instruction::I64Shl => self.run_shl::<i64>(0x3F),
&isa::Instruction::I64ShrS => self.run_shr::<i64, i64>(0x3F),
&isa::Instruction::I64ShrU => self.run_shr::<i64, u64>(0x3F),
&isa::Instruction::I64Rotl => self.run_rotl::<i64>(),
&isa::Instruction::I64Rotr => self.run_rotr::<i64>(),
&isa::Instruction::F32Abs => self.run_abs::<F32>(),
&isa::Instruction::F32Neg => self.run_neg::<F32>(),
&isa::Instruction::F32Ceil => self.run_ceil::<F32>(),
&isa::Instruction::F32Floor => self.run_floor::<F32>(),
&isa::Instruction::F32Trunc => self.run_trunc::<F32>(),
&isa::Instruction::F32Nearest => self.run_nearest::<F32>(),
&isa::Instruction::F32Sqrt => self.run_sqrt::<F32>(),
&isa::Instruction::F32Add => self.run_add::<F32>(),
&isa::Instruction::F32Sub => self.run_sub::<F32>(),
&isa::Instruction::F32Mul => self.run_mul::<F32>(),
&isa::Instruction::F32Div => self.run_div::<F32, F32>(),
&isa::Instruction::F32Min => self.run_min::<F32>(),
&isa::Instruction::F32Max => self.run_max::<F32>(),
&isa::Instruction::F32Copysign => self.run_copysign::<F32>(),
&isa::Instruction::F64Abs => self.run_abs::<F64>(),
&isa::Instruction::F64Neg => self.run_neg::<F64>(),
&isa::Instruction::F64Ceil => self.run_ceil::<F64>(),
&isa::Instruction::F64Floor => self.run_floor::<F64>(),
&isa::Instruction::F64Trunc => self.run_trunc::<F64>(),
&isa::Instruction::F64Nearest => self.run_nearest::<F64>(),
&isa::Instruction::F64Sqrt => self.run_sqrt::<F64>(),
&isa::Instruction::F64Add => self.run_add::<F64>(),
&isa::Instruction::F64Sub => self.run_sub::<F64>(),
&isa::Instruction::F64Mul => self.run_mul::<F64>(),
&isa::Instruction::F64Div => self.run_div::<F64, F64>(),
&isa::Instruction::F64Min => self.run_min::<F64>(),
&isa::Instruction::F64Max => self.run_max::<F64>(),
&isa::Instruction::F64Copysign => self.run_copysign::<F64>(),
&isa::Instruction::I32WrapI64 => self.run_wrap::<i64, i32>(),
&isa::Instruction::I32TruncSF32 => self.run_trunc_to_int::<F32, i32, i32>(),
&isa::Instruction::I32TruncUF32 => self.run_trunc_to_int::<F32, u32, i32>(),
&isa::Instruction::I32TruncSF64 => self.run_trunc_to_int::<F64, i32, i32>(),
&isa::Instruction::I32TruncUF64 => self.run_trunc_to_int::<F64, u32, i32>(),
&isa::Instruction::I64ExtendSI32 => self.run_extend::<i32, i64, i64>(),
&isa::Instruction::I64ExtendUI32 => self.run_extend::<u32, u64, i64>(),
&isa::Instruction::I64TruncSF32 => self.run_trunc_to_int::<F32, i64, i64>(),
&isa::Instruction::I64TruncUF32 => self.run_trunc_to_int::<F32, u64, i64>(),
&isa::Instruction::I64TruncSF64 => self.run_trunc_to_int::<F64, i64, i64>(),
&isa::Instruction::I64TruncUF64 => self.run_trunc_to_int::<F64, u64, i64>(),
&isa::Instruction::F32ConvertSI32 => self.run_extend::<i32, F32, F32>(),
&isa::Instruction::F32ConvertUI32 => self.run_extend::<u32, F32, F32>(),
&isa::Instruction::F32ConvertSI64 => self.run_wrap::<i64, F32>(),
&isa::Instruction::F32ConvertUI64 => self.run_wrap::<u64, F32>(),
&isa::Instruction::F32DemoteF64 => self.run_wrap::<F64, F32>(),
&isa::Instruction::F64ConvertSI32 => self.run_extend::<i32, F64, F64>(),
&isa::Instruction::F64ConvertUI32 => self.run_extend::<u32, F64, F64>(),
&isa::Instruction::F64ConvertSI64 => self.run_extend::<i64, F64, F64>(),
&isa::Instruction::F64ConvertUI64 => self.run_extend::<u64, F64, F64>(),
&isa::Instruction::F64PromoteF32 => self.run_extend::<F32, F64, F64>(),
&isa::Instruction::I32ReinterpretF32 => self.run_reinterpret::<F32, i32>(),
&isa::Instruction::I64ReinterpretF64 => self.run_reinterpret::<F64, i64>(),
&isa::Instruction::F32ReinterpretI32 => self.run_reinterpret::<i32, F32>(),
&isa::Instruction::F64ReinterpretI64 => self.run_reinterpret::<i64, F64>(),
}
}
fn run_unreachable(&mut self, _context: &mut FunctionContext) -> Result<InstructionOutcome, TrapKind> {
Err(TrapKind::Unreachable)
}
fn run_br(&mut self, _context: &mut FunctionContext, target: isa::Target) -> Result<InstructionOutcome, TrapKind> {
Ok(InstructionOutcome::Branch(target))
2018-01-17 15:32:33 +00:00
}
fn run_br_nez(&mut self, target: isa::Target) -> Result<InstructionOutcome, TrapKind> {
let condition = self.value_stack.pop_as();
if condition {
Ok(InstructionOutcome::Branch(target))
} else {
Ok(InstructionOutcome::RunNextInstruction)
}
}
fn run_br_eqz(&mut self, target: isa::Target) -> Result<InstructionOutcome, TrapKind> {
let condition = self.value_stack.pop_as();
if condition {
Ok(InstructionOutcome::RunNextInstruction)
} else {
Ok(InstructionOutcome::Branch(target))
}
}
fn run_br_table(&mut self, table: &[isa::Target]) -> Result<InstructionOutcome, TrapKind> {
let index: u32 = self.value_stack
.pop_as();
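// An out-of-range index selects the last entry, which is the default target.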
let dst =
if (index as usize) < table.len() - 1 {
table[index as usize].clone()
} else {
let len = table.len();
table[len - 1].clone()
};
Ok(InstructionOutcome::Branch(dst))
}
fn run_return(&mut self, drop: u32, keep: u8) -> Result<InstructionOutcome, TrapKind> {
Ok(InstructionOutcome::Return(drop, keep))
}
fn run_call(
&mut self,
context: &mut FunctionContext,
func_idx: u32,
) -> Result<InstructionOutcome, TrapKind> {
let func = context
.module()
.func_by_index(func_idx)
.expect("Due to validation func should exists");
Ok(InstructionOutcome::ExecuteCall(func))
}
fn run_call_indirect(
&mut self,
context: &mut FunctionContext,
signature_idx: u32,
) -> Result<InstructionOutcome, TrapKind> {
let table_func_idx: u32 = self
.value_stack
.pop_as();
let table = context
.module()
.table_by_index(DEFAULT_TABLE_INDEX)
.expect("Due to validation table should exists");
let func_ref = table.get(table_func_idx)
.map_err(|_| TrapKind::TableAccessOutOfBounds)?
.ok_or_else(|| TrapKind::ElemUninitialized)?;
{
let actual_function_type = func_ref.signature();
let required_function_type = context
.module()
.signature_by_index(signature_idx)
.expect("Due to validation type should exists");
if &*required_function_type != actual_function_type {
return Err(TrapKind::UnexpectedSignature);
}
}
Ok(InstructionOutcome::ExecuteCall(func_ref))
}
fn run_drop(&mut self) -> Result<InstructionOutcome, TrapKind> {
let _ = self
.value_stack
.pop();
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_select(&mut self) -> Result<InstructionOutcome, TrapKind> {
let (left, mid, right) = self
.value_stack
.pop_triple();
let condition = right
.try_into()
.expect("Due to validation stack top should be I32");
let val = if condition { left } else { mid };
self.value_stack.push(val)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_get_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapKind> {
let val = *self.value_stack.pick_mut(index as usize);
self.value_stack.push(val)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_set_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapKind> {
let val = self
.value_stack
.pop();
*self.value_stack.pick_mut(index as usize) = val;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_tee_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapKind> {
let val = self
.value_stack
.top()
.clone();
*self.value_stack.pick_mut(index as usize) = val;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_get_global(
&mut self,
context: &mut FunctionContext,
index: u32,
) -> Result<InstructionOutcome, TrapKind> {
let global = context
.module()
.global_by_index(index)
.expect("Due to validation global should exists");
let val = global.get();
self.value_stack.push(val)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_set_global(
&mut self,
context: &mut FunctionContext,
index: u32,
) -> Result<InstructionOutcome, TrapKind> {
let val = self
.value_stack
.pop();
let global = context
.module()
.global_by_index(index)
.expect("Due to validation global should exists");
global.set(val).expect("Due to validation set to a global should succeed");
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_load<T>(&mut self, context: &mut FunctionContext, offset: u32) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: LittleEndianConvert {
let raw_address = self
.value_stack
.pop_as();
let address =
effective_address(
offset,
raw_address,
)?;
let m = context.module()
.memory_by_index(DEFAULT_MEMORY_INDEX)
.expect("Due to validation memory should exists");
let b = m.get(address, mem::size_of::<T>())
.map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
let n = T::from_little_endian(&b)
.expect("Can't fail since buffer length should be size_of::<T>");
self.value_stack.push(n.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_load_extend<T, U>(&mut self, context: &mut FunctionContext, offset: u32) -> Result<InstructionOutcome, TrapKind>
where T: ExtendInto<U>, RuntimeValue: From<U>, T: LittleEndianConvert {
let raw_address = self
.value_stack
.pop_as();
let address =
effective_address(
offset,
raw_address,
)?;
let m = context.module()
.memory_by_index(DEFAULT_MEMORY_INDEX)
.expect("Due to validation memory should exists");
let b = m.get(address, mem::size_of::<T>())
.map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
let v = T::from_little_endian(&b)
.expect("Can't fail since buffer length should be size_of::<T>");
let stack_value: U = v.extend_into();
self
.value_stack
.push(stack_value.into())
.map_err(Into::into)
.map(|_| InstructionOutcome::RunNextInstruction)
}
fn run_store<T>(&mut self, context: &mut FunctionContext, offset: u32) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue, T: LittleEndianConvert {
let stack_value = self
.value_stack
.pop_as::<T>()
.into_little_endian();
let raw_address = self
.value_stack
.pop_as::<u32>();
let address =
effective_address(
offset,
raw_address,
)?;
let m = context.module()
.memory_by_index(DEFAULT_MEMORY_INDEX)
.expect("Due to validation memory should exists");
m.set(address, &stack_value)
.map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_store_wrap<T, U>(
&mut self,
context: &mut FunctionContext,
offset: u32,
) -> Result<InstructionOutcome, TrapKind>
where
T: FromRuntimeValue,
T: WrapInto<U>,
U: LittleEndianConvert,
{
let stack_value: T = self
.value_stack
.pop()
.try_into()
.expect("Due to validation value should be of proper type");
let stack_value = stack_value.wrap_into().into_little_endian();
let raw_address = self
.value_stack
.pop_as::<u32>();
let address =
effective_address(
offset,
raw_address,
)?;
let m = context.module()
.memory_by_index(DEFAULT_MEMORY_INDEX)
.expect("Due to validation memory should exists");
m.set(address, &stack_value)
.map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_current_memory(&mut self, context: &mut FunctionContext) -> Result<InstructionOutcome, TrapKind> {
let m = context.module()
.memory_by_index(DEFAULT_MEMORY_INDEX)
.expect("Due to validation memory should exists");
let s = m.current_size().0;
self
.value_stack
.push(RuntimeValue::I32(s as i32))?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_grow_memory(&mut self, context: &mut FunctionContext) -> Result<InstructionOutcome, TrapKind> {
let pages: u32 = self
.value_stack
.pop_as();
let m = context.module()
.memory_by_index(DEFAULT_MEMORY_INDEX)
.expect("Due to validation memory should exists");
let m = match m.grow(Pages(pages as usize)) {
Ok(Pages(new_size)) => new_size as u32,
Err(_) => u32::MAX, // Returns -1 (or 0xFFFFFFFF) in case of error.
};
self
.value_stack
.push(RuntimeValue::I32(m as i32))?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_const(&mut self, val: RuntimeValue) -> Result<InstructionOutcome, TrapKind> {
self
.value_stack
.push(val)
.map_err(Into::into)
.map(|_| InstructionOutcome::RunNextInstruction)
}
fn run_relop<T, F>(&mut self, f: F) -> Result<InstructionOutcome, TrapKind>
where
T: FromRuntimeValue,
F: FnOnce(T, T) -> bool,
{
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = if f(left, right) {
RuntimeValue::I32(1)
} else {
RuntimeValue::I32(0)
};
self.value_stack.push(v)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_eqz<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue, T: PartialEq<T> + Default {
let v = self
.value_stack
.pop_as::<T>();
let v = RuntimeValue::I32(if v == Default::default() { 1 } else { 0 });
self.value_stack.push(v)?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_eq<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue + PartialEq<T>
{
self.run_relop(|left: T, right: T| left == right)
}
fn run_ne<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue + PartialEq<T> {
self.run_relop(|left: T, right: T| left != right)
}
fn run_lt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue + PartialOrd<T> {
self.run_relop(|left: T, right: T| left < right)
}
fn run_gt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue + PartialOrd<T> {
self.run_relop(|left: T, right: T| left > right)
}
fn run_lte<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue + PartialOrd<T> {
self.run_relop(|left: T, right: T| left <= right)
}
fn run_gte<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where T: FromRuntimeValue + PartialOrd<T> {
self.run_relop(|left: T, right: T| left >= right)
}
fn run_unop<T, U, F>(&mut self, f: F) -> Result<InstructionOutcome, TrapKind>
where
F: FnOnce(T) -> U,
T: FromRuntimeValue,
RuntimeValue: From<U>
{
let v = self
.value_stack
.pop_as::<T>();
let v = f(v);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_clz<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Integer<T> + FromRuntimeValue {
self.run_unop(|v: T| v.leading_zeros())
}
fn run_ctz<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Integer<T> + FromRuntimeValue {
self.run_unop(|v: T| v.trailing_zeros())
}
fn run_popcnt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Integer<T> + FromRuntimeValue {
self.run_unop(|v: T| v.count_ones())
}
fn run_add<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: ArithmeticOps<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.add(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_sub<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: ArithmeticOps<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.sub(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_mul<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: ArithmeticOps<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.mul(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_div<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: TransmuteInto<U> + FromRuntimeValue, U: ArithmeticOps<U> + TransmuteInto<T> {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let (left, right) = (left.transmute_into(), right.transmute_into());
let v = left.div(right)?;
let v = v.transmute_into();
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_rem<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: TransmuteInto<U> + FromRuntimeValue, U: Integer<U> + TransmuteInto<T> {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let (left, right) = (left.transmute_into(), right.transmute_into());
let v = left.rem(right)?;
let v = v.transmute_into();
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_and<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<<T as ops::BitAnd>::Output>, T: ops::BitAnd<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.bitand(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_or<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<<T as ops::BitOr>::Output>, T: ops::BitOr<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.bitor(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_xor<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<<T as ops::BitXor>::Output>, T: ops::BitXor<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.bitxor(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
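// `mask` limits the shift amount to the operand width (0x1F for 32-bit, 0x3F for 64-bit).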
fn run_shl<T>(&mut self, mask: T) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<<T as ops::Shl<T>>::Output>, T: ops::Shl<T> + ops::BitAnd<T, Output=T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.shl(right & mask);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_shr<T, U>(&mut self, mask: U) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: TransmuteInto<U> + FromRuntimeValue, U: ops::Shr<U> + ops::BitAnd<U, Output=U>, <U as ops::Shr<U>>::Output: TransmuteInto<T> {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let (left, right) = (left.transmute_into(), right.transmute_into());
let v = left.shr(right & mask);
let v = v.transmute_into();
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_rotl<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Integer<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.rotl(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_rotr<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Integer<T> + FromRuntimeValue
{
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.rotr(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_abs<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
self.run_unop(|v: T| v.abs())
}
fn run_neg<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where
RuntimeValue: From<<T as ops::Neg>::Output>,
T: ops::Neg + FromRuntimeValue
{
self.run_unop(|v: T| v.neg())
}
fn run_ceil<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
self.run_unop(|v: T| v.ceil())
}
fn run_floor<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
self.run_unop(|v: T| v.floor())
}
fn run_trunc<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
self.run_unop(|v: T| v.trunc())
}
fn run_nearest<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
self.run_unop(|v: T| v.nearest())
}
fn run_sqrt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
self.run_unop(|v: T| v.sqrt())
}
fn run_min<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue
{
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.min(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_max<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.max(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_copysign<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<T>, T: Float<T> + FromRuntimeValue {
let (left, right) = self
.value_stack
.pop_pair_as::<T>();
let v = left.copysign(right);
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_wrap<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<U>, T: WrapInto<U> + FromRuntimeValue {
self.run_unop(|v: T| v.wrap_into())
}
fn run_trunc_to_int<T, U, V>(&mut self) -> Result<InstructionOutcome, TrapKind>
where RuntimeValue: From<V>, T: TryTruncateInto<U, TrapKind> + FromRuntimeValue, U: TransmuteInto<V>, {
let v = self
.value_stack
.pop_as::<T>();
v.try_truncate_into()
2018-01-17 15:32:33 +00:00
.map(|v| v.transmute_into())
.map(|v| self.value_stack.push(v.into()))
.map(|_| InstructionOutcome::RunNextInstruction)
}
fn run_extend<T, U, V>(&mut self) -> Result<InstructionOutcome, TrapKind>
where
RuntimeValue: From<V>, T: ExtendInto<U> + FromRuntimeValue, U: TransmuteInto<V>
{
let v = self
.value_stack
.pop_as::<T>();
let v = v.extend_into().transmute_into();
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
fn run_reinterpret<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
where
RuntimeValue: From<U>, T: FromRuntimeValue, T: TransmuteInto<U>
{
let v = self
.value_stack
.pop_as::<T>();
let v = v.transmute_into();
self.value_stack.push(v.into())?;
Ok(InstructionOutcome::RunNextInstruction)
}
}
/// Function execution context.
struct FunctionContext {
/// Is context initialized.
pub is_initialized: bool,
/// Internal function reference.
pub function: FuncRef,
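/// Module the function belongs to.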
pub module: ModuleRef,
/// Current instruction position.
pub position: usize,
}
impl FunctionContext {
pub fn new(function: FuncRef) -> Self {
let module = match *function.as_internal() {
FuncInstanceInternal::Internal { ref module, .. } => module.upgrade().expect("module deallocated"),
FuncInstanceInternal::Host { .. } => panic!("Host functions can't be called as internally defined functions; Thus FunctionContext can be created only with internally defined functions; qed"),
};
FunctionContext {
is_initialized: false,
function: function,
module: ModuleRef(module),
position: 0,
}
}
pub fn nested(&mut self, function: FuncRef) -> Result<Self, TrapKind> {
let module = match *function.as_internal() {
FuncInstanceInternal::Internal { ref module, .. } => module.upgrade().expect("module deallocated"),
FuncInstanceInternal::Host { .. } => panic!("Host functions can't be called as internally defined functions; Thus FunctionContext can be created only with internally defined functions; qed"),
};
Ok(FunctionContext {
is_initialized: false,
function: function,
module: ModuleRef(module),
position: 0,
})
}
pub fn is_initialized(&self) -> bool {
self.is_initialized
}
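/// Reserves stack space for the function's locals by pushing a default (zero)
/// value for each declared local.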
pub fn initialize(&mut self, locals: &[Local], value_stack: &mut ValueStack) -> Result<(), TrapKind> {
debug_assert!(!self.is_initialized);
let locals = locals.iter()
.flat_map(|l| repeat(l.value_type()).take(l.count() as usize))
.map(::types::ValueType::from_elements)
.map(RuntimeValue::default)
.collect::<Vec<_>>();
// TODO: Replace with extend.
for local in locals {
value_stack.push(local)
.map_err(|_| TrapKind::StackOverflow)?;
}
self.is_initialized = true;
Ok(())
}
pub fn module(&self) -> ModuleRef {
self.module.clone()
}
}
impl fmt::Debug for FunctionContext {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "FunctionContext")
}
}
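// Computes `address + offset`, trapping with `MemoryAccessOutOfBounds` on `u32` overflow.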
fn effective_address(address: u32, offset: u32) -> Result<u32, TrapKind> {
match offset.checked_add(address) {
None => Err(TrapKind::MemoryAccessOutOfBounds),
Some(address) => Ok(address),
}
}
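// Pops the arguments for `signature` off the caller's value stack. They come off
// in reverse order, so the vector is reversed to restore declaration order.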
fn prepare_function_args(
signature: &Signature,
caller_stack: &mut ValueStack,
) -> Vec<RuntimeValue> {
let mut args = signature
.params()
.iter()
.map(|_| caller_stack.pop())
.collect::<Vec<RuntimeValue>>();
args.reverse();
check_function_args(signature, &args).expect("Due to validation arguments should match");
args
}
pub fn check_function_args(signature: &Signature, args: &[RuntimeValue]) -> Result<(), Error> {
if signature.params().len() != args.len() {
return Err(
Error::Function(
format!(
"not enough arguments, given {} but expected: {}",
args.len(),
signature.params().len(),
)
)
);
}
signature.params().iter().cloned().zip(args).map(|(expected_type, param_value)| {
let actual_type = param_value.value_type();
if actual_type != expected_type {
return Err(Error::Function(format!("invalid parameter type {:?} when expected {:?}", actual_type, expected_type)));
}
Ok(())
}).collect::<Result<Vec<_>, _>>()?;
Ok(())
}
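/// Preallocated, fixed-capacity value stack. Pushing beyond the capacity traps
/// with `StackOverflow`.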
#[derive(Debug)]
struct ValueStack {
buf: Box<[RuntimeValue]>,
/// Index of the first free place in the stack.
sp: usize,
}
impl ValueStack {
fn with_limit(limit: usize) -> ValueStack {
let mut buf = Vec::new();
buf.resize(limit, RuntimeValue::I32(0));
ValueStack {
buf: buf.into_boxed_slice(),
sp: 0,
}
}
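// Drops `drop` values from the stack; when `keep` is 1 the current top value is
// preserved and the dropped values are the ones directly beneath it.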
#[inline]
fn drop_keep(&mut self, drop: u32, keep: u8) {
assert!(keep <= 1);
if keep == 1 {
let top = *self.top();
*self.pick_mut(drop as usize + 1) = top;
}
let cur_stack_len = self.len();
self.sp = cur_stack_len - drop as usize;
}
#[inline]
fn pop_as<T>(&mut self) -> T
where
T: FromRuntimeValue,
{
let value = self.pop();
value.try_into().expect("Due to validation stack top's type should match")
}
#[inline]
fn pop_pair_as<T>(&mut self) -> (T, T)
where
T: FromRuntimeValue,
{
let right = self.pop_as();
let left = self.pop_as();
(left, right)
}
#[inline]
fn pop_triple(&mut self) -> (RuntimeValue, RuntimeValue, RuntimeValue) {
let right = self.pop();
let mid = self.pop();
let left = self.pop();
(left, mid, right)
}
#[inline]
fn top(&self) -> &RuntimeValue {
self.pick(1)
}
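// `depth` is counted from the top of the stack: `pick(1)` is the topmost value.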
fn pick(&self, depth: usize) -> &RuntimeValue {
&self.buf[self.sp - depth]
}
#[inline]
fn pick_mut(&mut self, depth: usize) -> &mut RuntimeValue {
&mut self.buf[self.sp - depth]
}
#[inline]
fn pop(&mut self) -> RuntimeValue {
self.sp -= 1;
self.buf[self.sp]
}
#[inline]
fn push(&mut self, value: RuntimeValue) -> Result<(), TrapKind> {
let cell = self.buf.get_mut(self.sp).ok_or_else(|| TrapKind::StackOverflow)?;
*cell = value;
self.sp += 1;
Ok(())
}
#[inline]
fn len(&self) -> usize {
self.sp
}
}