#[allow(unused_imports)]
use alloc::prelude::*;
use validation::{DEFAULT_MEMORY_INDEX, DEFAULT_TABLE_INDEX};
use core::fmt;
use core::ops;
use core::{u32, usize};
use func::{FuncInstance, FuncInstanceInternal, FuncRef};
use host::Externals;
use isa;
use memory::MemoryRef;
use memory_units::Pages;
use module::ModuleRef;
use nan_preserving_float::{F32, F64};
use parity_wasm::elements::Local;
use value::{
    ArithmeticOps, ExtendInto, Float, Integer, LittleEndianConvert, RuntimeValue, TransmuteInto,
    TryTruncateInto, WrapInto,
};
use {Signature, Trap, TrapKind, ValueType};

/// Maximum number of entries in value stack.
pub const DEFAULT_VALUE_STACK_LIMIT: usize =
    (1024 * 1024) / ::core::mem::size_of::<RuntimeValueInternal>();

// TODO: Make these parameters changeable.
pub const DEFAULT_CALL_STACK_LIMIT: usize = 64 * 1024;

/// This is a wrapper around u64 to allow us to treat runtime values as a tag-free `u64`
/// (where if the runtime value is <64 bits the upper bits are 0). This is safe, since
/// all of the possible runtime values are valid to create from 64 defined bits, so if
/// types don't line up we get a logic error (which will ideally be caught by the wasm
/// spec tests) and not undefined behaviour.
///
/// At the boundary between the interpreter and the outside world we convert to the public
/// `RuntimeValue` type, which can then be matched on. We can create a `RuntimeValue` from
/// a `RuntimeValueInternal` only when the type is statically known, which it always is
/// at these boundaries.
#[derive(Copy, Clone, Debug, PartialEq, Default)]
#[repr(transparent)]
struct RuntimeValueInternal(pub u64);

impl RuntimeValueInternal {
    pub fn with_type(self, ty: ValueType) -> RuntimeValue {
        match ty {
            ValueType::I32 => RuntimeValue::I32(<_>::from_runtime_value_internal(self)),
            ValueType::I64 => RuntimeValue::I64(<_>::from_runtime_value_internal(self)),
            ValueType::F32 => RuntimeValue::F32(<_>::from_runtime_value_internal(self)),
            ValueType::F64 => RuntimeValue::F64(<_>::from_runtime_value_internal(self)),
        }
    }
}

trait FromRuntimeValueInternal
where
    Self: Sized,
{
    fn from_runtime_value_internal(val: RuntimeValueInternal) -> Self;
}

macro_rules! impl_from_runtime_value_internal {
    ($($t:ty),*) => {
        $(
            impl FromRuntimeValueInternal for $t {
                fn from_runtime_value_internal(
                    RuntimeValueInternal(val): RuntimeValueInternal,
                ) -> Self {
                    val as _
                }
            }

            impl From<$t> for RuntimeValueInternal {
                fn from(other: $t) -> Self {
                    RuntimeValueInternal(other as _)
                }
            }
        )*
    };
}

macro_rules! impl_from_runtime_value_internal_float {
    ($($t:ty),*) => {
        $(
            impl FromRuntimeValueInternal for $t {
                fn from_runtime_value_internal(
                    RuntimeValueInternal(val): RuntimeValueInternal,
                ) -> Self {
                    <$t>::from_bits(val as _)
                }
            }

            impl From<$t> for RuntimeValueInternal {
                fn from(other: $t) -> Self {
                    RuntimeValueInternal(other.to_bits() as _)
                }
            }
        )*
    };
}

impl_from_runtime_value_internal!(i8, u8, i16, u16, i32, u32, i64, u64);
impl_from_runtime_value_internal_float!(f32, f64, F32, F64);

impl From<bool> for RuntimeValueInternal {
    fn from(other: bool) -> Self {
        (if other { 1 } else { 0 }).into()
    }
}

impl FromRuntimeValueInternal for bool {
    fn from_runtime_value_internal(RuntimeValueInternal(val): RuntimeValueInternal) -> Self {
        val != 0
    }
}

impl From<RuntimeValue> for RuntimeValueInternal {
    fn from(other: RuntimeValue) -> Self {
        match other {
            RuntimeValue::I32(val) => val.into(),
            RuntimeValue::I64(val) => val.into(),
            RuntimeValue::F32(val) => val.into(),
            RuntimeValue::F64(val) => val.into(),
        }
    }
}
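// A minimal sketch of the round-trip property described above, assuming the
// standard `cargo test` harness (the module name is illustrative, not part of
// the interpreter): values cross the interpreter boundary as untyped bits and
// are re-tagged only where their type is statically known.
#[cfg(test)]
mod runtime_value_internal_roundtrip {
    use super::*;

    #[test]
    fn f64_roundtrips_through_untyped_u64() {
        let val = RuntimeValue::F64(F64::from(1.5));
        // Strip the tag at the boundary...
        let internal = RuntimeValueInternal::from(val);
        // ...and restore it with the statically known type.
        assert_eq!(internal.with_type(ValueType::F64), val);
    }
}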
/// Interpreter action to execute after executing instruction.
pub enum InstructionOutcome {
    /// Continue with next instruction.
    RunNextInstruction,
    /// Branch to an instruction at the given position.
    Branch(isa::Target),
    /// Execute function call.
    ExecuteCall(FuncRef),
    /// Return from current function block.
    Return(isa::DropKeep),
}

#[derive(PartialEq, Eq)]
/// Function execution state, related to pause and resume.
pub enum InterpreterState {
    /// The interpreter has been created, but has not been executed.
    Initialized,
    /// The interpreter has started execution; it cannot be started again, and can only be
    /// resumed if a Host trap pauses it.
    Started,
    /// The interpreter has been executed, and returned a Host trap. It can resume execution
    /// by providing back a return value.
    Resumable(Option<ValueType>),
}

impl InterpreterState {
    pub fn is_resumable(&self) -> bool {
        match self {
            &InterpreterState::Resumable(_) => true,
            _ => false,
        }
    }
}

/// Function run result.
enum RunResult {
    /// Function has returned.
    Return,
    /// Function is calling other function.
    NestedCall(FuncRef),
}

/// Function interpreter.
pub struct Interpreter {
    value_stack: ValueStack,
    call_stack: Vec<FunctionContext>,
    return_type: Option<ValueType>,
    state: InterpreterState,
}

impl Interpreter {
    pub fn new(func: &FuncRef, args: &[RuntimeValue]) -> Result<Interpreter, Trap> {
        let mut value_stack = ValueStack::with_limit(DEFAULT_VALUE_STACK_LIMIT);
        for &arg in args {
            let arg = arg.into();
            value_stack.push(arg).map_err(
                // There is not enough space for pushing initial arguments.
                // Weird, but bail out anyway.
                |_| Trap::from(TrapKind::StackOverflow),
            )?;
        }

        let mut call_stack = Vec::new();
        let initial_frame = FunctionContext::new(func.clone());
        call_stack.push(initial_frame);

        let return_type = func.signature().return_type();

        Ok(Interpreter {
            value_stack,
            call_stack,
            return_type,
            state: InterpreterState::Initialized,
        })
    }

    pub fn state(&self) -> &InterpreterState {
        &self.state
    }

    pub fn start_execution<'a, E: Externals + 'a>(
        &mut self,
        externals: &'a mut E,
    ) -> Result<Option<RuntimeValue>, Trap> {
        // Ensure that the VM has not been executed. This is checked in `FuncInvocation::start_execution`.
        assert!(self.state == InterpreterState::Initialized);

        self.state = InterpreterState::Started;
        self.run_interpreter_loop(externals)?;

        let opt_return_value = self
            .return_type
            .map(|vt| self.value_stack.pop().with_type(vt));

        // Ensure that the stack is empty after the execution. This is guaranteed by the validation properties.
        assert!(self.value_stack.len() == 0);

        Ok(opt_return_value)
    }

    pub fn resume_execution<'a, E: Externals + 'a>(
        &mut self,
        return_val: Option<RuntimeValue>,
        externals: &'a mut E,
    ) -> Result<Option<RuntimeValue>, Trap> {
        use core::mem::swap;

        // Ensure that the VM is resumable. This is checked in `FuncInvocation::resume_execution`.
        assert!(self.state.is_resumable());

        let mut resumable_state = InterpreterState::Started;
        swap(&mut self.state, &mut resumable_state);

        if let Some(return_val) = return_val {
            self.value_stack
                .push(return_val.into())
                .map_err(Trap::new)?;
        }

        self.run_interpreter_loop(externals)?;

        let opt_return_value = self
            .return_type
            .map(|vt| self.value_stack.pop().with_type(vt));

        // Ensure that the stack is empty after the execution. This is guaranteed by the validation properties.
        assert!(self.value_stack.len() == 0);

        Ok(opt_return_value)
    }
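    /// Drives execution to completion without recursing on the host stack:
    /// each iteration pops a frame from `call_stack` and runs it; a nested
    /// call pushes the caller frame back (with its saved position) followed
    /// by the callee frame, so the loop picks up the callee next.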
    fn run_interpreter_loop<'a, E: Externals + 'a>(
        &mut self,
        externals: &'a mut E,
    ) -> Result<(), Trap> {
        loop {
            let mut function_context = self.call_stack.pop().expect(
                "on loop entry - not empty; on loop continue - checking for emptiness; qed",
            );
            let function_ref = function_context.function.clone();
            let function_body = function_ref
                .body()
                .expect(
                    "Host functions checked in function_return below; Internal functions always have a body; qed"
                );

            if !function_context.is_initialized() {
                // Initialize stack frame for the function call.
                function_context.initialize(&function_body.locals, &mut self.value_stack)?;
            }

            let function_return = self
                .do_run_function(&mut function_context, &function_body.code)
                .map_err(Trap::new)?;

            match function_return {
                RunResult::Return => {
                    if self.call_stack.last().is_none() {
                        // This was the last frame in the call stack. This means we
                        // are done executing.
                        return Ok(());
                    }
                }
                RunResult::NestedCall(nested_func) => {
                    if self.call_stack.len() + 1 >= DEFAULT_CALL_STACK_LIMIT {
                        return Err(TrapKind::StackOverflow.into());
                    }

                    match *nested_func.as_internal() {
                        FuncInstanceInternal::Internal { .. } => {
                            let nested_context = FunctionContext::new(nested_func.clone());
                            self.call_stack.push(function_context);
                            self.call_stack.push(nested_context);
                        }
                        FuncInstanceInternal::Host { ref signature, .. } => {
                            let args = prepare_function_args(signature, &mut self.value_stack);
                            // We push the function context first. If the VM is not resumable,
                            // it does no harm. If it is, we then save the context here.
                            self.call_stack.push(function_context);

                            let return_val =
                                match FuncInstance::invoke(&nested_func, &args, externals) {
                                    Ok(val) => val,
                                    Err(trap) => {
                                        if trap.kind().is_host() {
                                            self.state = InterpreterState::Resumable(
                                                nested_func.signature().return_type(),
                                            );
                                        }
                                        return Err(trap);
                                    }
                                };

                            // Check if `return_val` matches the signature.
                            let value_ty = return_val.as_ref().map(|val| val.value_type());
                            let expected_ty = nested_func.signature().return_type();
                            if value_ty != expected_ty {
                                return Err(TrapKind::UnexpectedSignature.into());
                            }

                            if let Some(return_val) = return_val {
                                self.value_stack
                                    .push(return_val.into())
                                    .map_err(Trap::new)?;
                            }
                        }
                    }
                }
            }
        }
    }
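    /// Executes instructions sequentially until the current function either
    /// returns or calls into another function. Branches are handled in place
    /// by repositioning the instruction iterator, so only calls and returns
    /// leave this loop.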
    fn do_run_function(
        &mut self,
        function_context: &mut FunctionContext,
        instructions: &isa::Instructions,
    ) -> Result<RunResult, TrapKind> {
        let mut iter = instructions.iterate_from(function_context.position);

        loop {
            let instruction = iter.next().expect(
                "Ran out of instructions, this should be impossible \
                 since validation ensures that we either have an explicit \
                 return or an implicit block `end`.",
            );

            match self.run_instruction(function_context, &instruction)? {
                InstructionOutcome::RunNextInstruction => {}
                InstructionOutcome::Branch(target) => {
                    iter = instructions.iterate_from(target.dst_pc);
                    self.value_stack.drop_keep(target.drop_keep);
                }
                InstructionOutcome::ExecuteCall(func_ref) => {
                    function_context.position = iter.position();
                    return Ok(RunResult::NestedCall(func_ref));
                }
                InstructionOutcome::Return(drop_keep) => {
                    self.value_stack.drop_keep(drop_keep);
                    break;
                }
            }
        }

        Ok(RunResult::Return)
    }

    #[inline(always)]
    fn run_instruction(
        &mut self,
        context: &mut FunctionContext,
        instruction: &isa::Instruction,
    ) -> Result<InstructionOutcome, TrapKind> {
        match instruction {
            &isa::Instruction::Unreachable => self.run_unreachable(context),

            &isa::Instruction::Br(target) => self.run_br(context, target.clone()),
            &isa::Instruction::BrIfEqz(target) => self.run_br_eqz(target.clone()),
            &isa::Instruction::BrIfNez(target) => self.run_br_nez(target.clone()),
            &isa::Instruction::BrTable(targets) => self.run_br_table(targets),
            &isa::Instruction::Return(drop_keep) => self.run_return(drop_keep),

            &isa::Instruction::Call(index) => self.run_call(context, index),
            &isa::Instruction::CallIndirect(index) => self.run_call_indirect(context, index),

            &isa::Instruction::Drop => self.run_drop(),
            &isa::Instruction::Select => self.run_select(),

            &isa::Instruction::GetLocal(depth) => self.run_get_local(depth),
            &isa::Instruction::SetLocal(depth) => self.run_set_local(depth),
            &isa::Instruction::TeeLocal(depth) => self.run_tee_local(depth),
            &isa::Instruction::GetGlobal(index) => self.run_get_global(context, index),
            &isa::Instruction::SetGlobal(index) => self.run_set_global(context, index),

            &isa::Instruction::I32Load(offset) => self.run_load::<i32>(context, offset),
            &isa::Instruction::I64Load(offset) => self.run_load::<i64>(context, offset),
            &isa::Instruction::F32Load(offset) => self.run_load::<F32>(context, offset),
            &isa::Instruction::F64Load(offset) => self.run_load::<F64>(context, offset),
            &isa::Instruction::I32Load8S(offset) => {
                self.run_load_extend::<i8, i32>(context, offset)
            }
            &isa::Instruction::I32Load8U(offset) => {
                self.run_load_extend::<u8, i32>(context, offset)
            }
            &isa::Instruction::I32Load16S(offset) => {
                self.run_load_extend::<i16, i32>(context, offset)
            }
            &isa::Instruction::I32Load16U(offset) => {
                self.run_load_extend::<u16, i32>(context, offset)
            }
            &isa::Instruction::I64Load8S(offset) => {
                self.run_load_extend::<i8, i64>(context, offset)
            }
            &isa::Instruction::I64Load8U(offset) => {
                self.run_load_extend::<u8, i64>(context, offset)
            }
            &isa::Instruction::I64Load16S(offset) => {
                self.run_load_extend::<i16, i64>(context, offset)
            }
            &isa::Instruction::I64Load16U(offset) => {
                self.run_load_extend::<u16, i64>(context, offset)
            }
            &isa::Instruction::I64Load32S(offset) => {
                self.run_load_extend::<i32, i64>(context, offset)
            }
            &isa::Instruction::I64Load32U(offset) => {
                self.run_load_extend::<u32, i64>(context, offset)
            }

            &isa::Instruction::I32Store(offset) => self.run_store::<i32>(context, offset),
            &isa::Instruction::I64Store(offset) => self.run_store::<i64>(context, offset),
            &isa::Instruction::F32Store(offset) => self.run_store::<F32>(context, offset),
            &isa::Instruction::F64Store(offset) => self.run_store::<F64>(context, offset),
            &isa::Instruction::I32Store8(offset) => {
                self.run_store_wrap::<i32, i8>(context, offset)
            }
            &isa::Instruction::I32Store16(offset) => {
                self.run_store_wrap::<i32, i16>(context, offset)
            }
            &isa::Instruction::I64Store8(offset) => {
                self.run_store_wrap::<i64, i8>(context, offset)
            }
            &isa::Instruction::I64Store16(offset) => {
                self.run_store_wrap::<i64, i16>(context, offset)
            }
            &isa::Instruction::I64Store32(offset) => {
                self.run_store_wrap::<i64, i32>(context, offset)
            }

            &isa::Instruction::CurrentMemory => self.run_current_memory(context),
            &isa::Instruction::GrowMemory => self.run_grow_memory(context),
            &isa::Instruction::I32Const(val) => self.run_const(val.into()),
            &isa::Instruction::I64Const(val) => self.run_const(val.into()),
            &isa::Instruction::F32Const(val) => self.run_const(val.into()),
            &isa::Instruction::F64Const(val) => self.run_const(val.into()),

            &isa::Instruction::I32Eqz => self.run_eqz::<i32>(),
            &isa::Instruction::I32Eq => self.run_eq::<i32>(),
            &isa::Instruction::I32Ne => self.run_ne::<i32>(),
            &isa::Instruction::I32LtS => self.run_lt::<i32>(),
            &isa::Instruction::I32LtU => self.run_lt::<u32>(),
            &isa::Instruction::I32GtS => self.run_gt::<i32>(),
            &isa::Instruction::I32GtU => self.run_gt::<u32>(),
            &isa::Instruction::I32LeS => self.run_lte::<i32>(),
            &isa::Instruction::I32LeU => self.run_lte::<u32>(),
            &isa::Instruction::I32GeS => self.run_gte::<i32>(),
            &isa::Instruction::I32GeU => self.run_gte::<u32>(),

            &isa::Instruction::I64Eqz => self.run_eqz::<i64>(),
            &isa::Instruction::I64Eq => self.run_eq::<i64>(),
            &isa::Instruction::I64Ne => self.run_ne::<i64>(),
            &isa::Instruction::I64LtS => self.run_lt::<i64>(),
            &isa::Instruction::I64LtU => self.run_lt::<u64>(),
            &isa::Instruction::I64GtS => self.run_gt::<i64>(),
            &isa::Instruction::I64GtU => self.run_gt::<u64>(),
            &isa::Instruction::I64LeS => self.run_lte::<i64>(),
            &isa::Instruction::I64LeU => self.run_lte::<u64>(),
            &isa::Instruction::I64GeS => self.run_gte::<i64>(),
            &isa::Instruction::I64GeU => self.run_gte::<u64>(),

            &isa::Instruction::F32Eq => self.run_eq::<F32>(),
            &isa::Instruction::F32Ne => self.run_ne::<F32>(),
            &isa::Instruction::F32Lt => self.run_lt::<F32>(),
            &isa::Instruction::F32Gt => self.run_gt::<F32>(),
            &isa::Instruction::F32Le => self.run_lte::<F32>(),
            &isa::Instruction::F32Ge => self.run_gte::<F32>(),

            &isa::Instruction::F64Eq => self.run_eq::<F64>(),
            &isa::Instruction::F64Ne => self.run_ne::<F64>(),
            &isa::Instruction::F64Lt => self.run_lt::<F64>(),
            &isa::Instruction::F64Gt => self.run_gt::<F64>(),
            &isa::Instruction::F64Le => self.run_lte::<F64>(),
            &isa::Instruction::F64Ge => self.run_gte::<F64>(),

            &isa::Instruction::I32Clz => self.run_clz::<i32>(),
            &isa::Instruction::I32Ctz => self.run_ctz::<i32>(),
            &isa::Instruction::I32Popcnt => self.run_popcnt::<i32>(),
            &isa::Instruction::I32Add => self.run_add::<i32>(),
            &isa::Instruction::I32Sub => self.run_sub::<i32>(),
            &isa::Instruction::I32Mul => self.run_mul::<i32>(),
            &isa::Instruction::I32DivS => self.run_div::<i32, i32>(),
            &isa::Instruction::I32DivU => self.run_div::<i32, u32>(),
            &isa::Instruction::I32RemS => self.run_rem::<i32, i32>(),
            &isa::Instruction::I32RemU => self.run_rem::<i32, u32>(),
            &isa::Instruction::I32And => self.run_and::<i32>(),
            &isa::Instruction::I32Or => self.run_or::<i32>(),
            &isa::Instruction::I32Xor => self.run_xor::<i32>(),
            &isa::Instruction::I32Shl => self.run_shl::<i32>(0x1F),
            &isa::Instruction::I32ShrS => self.run_shr::<i32, i32>(0x1F),
            &isa::Instruction::I32ShrU => self.run_shr::<i32, u32>(0x1F),
            &isa::Instruction::I32Rotl => self.run_rotl::<i32>(),
            &isa::Instruction::I32Rotr => self.run_rotr::<i32>(),

            &isa::Instruction::I64Clz => self.run_clz::<i64>(),
            &isa::Instruction::I64Ctz => self.run_ctz::<i64>(),
            &isa::Instruction::I64Popcnt => self.run_popcnt::<i64>(),
            &isa::Instruction::I64Add => self.run_add::<i64>(),
            &isa::Instruction::I64Sub => self.run_sub::<i64>(),
            &isa::Instruction::I64Mul => self.run_mul::<i64>(),
            &isa::Instruction::I64DivS => self.run_div::<i64, i64>(),
            &isa::Instruction::I64DivU => self.run_div::<i64, u64>(),
            &isa::Instruction::I64RemS => self.run_rem::<i64, i64>(),
            &isa::Instruction::I64RemU => self.run_rem::<i64, u64>(),
            &isa::Instruction::I64And => self.run_and::<i64>(),
            &isa::Instruction::I64Or => self.run_or::<i64>(),
            &isa::Instruction::I64Xor => self.run_xor::<i64>(),
            &isa::Instruction::I64Shl => self.run_shl::<i64>(0x3F),
            &isa::Instruction::I64ShrS => self.run_shr::<i64, i64>(0x3F),
            &isa::Instruction::I64ShrU => self.run_shr::<i64, u64>(0x3F),
            &isa::Instruction::I64Rotl => self.run_rotl::<i64>(),
            &isa::Instruction::I64Rotr => self.run_rotr::<i64>(),

            &isa::Instruction::F32Abs => self.run_abs::<F32>(),
            &isa::Instruction::F32Neg => self.run_neg::<F32>(),
            &isa::Instruction::F32Ceil => self.run_ceil::<F32>(),
            &isa::Instruction::F32Floor => self.run_floor::<F32>(),
            &isa::Instruction::F32Trunc => self.run_trunc::<F32>(),
            &isa::Instruction::F32Nearest => self.run_nearest::<F32>(),
            &isa::Instruction::F32Sqrt => self.run_sqrt::<F32>(),
            &isa::Instruction::F32Add => self.run_add::<F32>(),
            &isa::Instruction::F32Sub => self.run_sub::<F32>(),
            &isa::Instruction::F32Mul => self.run_mul::<F32>(),
            &isa::Instruction::F32Div => self.run_div::<F32, F32>(),
            &isa::Instruction::F32Min => self.run_min::<F32>(),
            &isa::Instruction::F32Max => self.run_max::<F32>(),
            &isa::Instruction::F32Copysign => self.run_copysign::<F32>(),

            &isa::Instruction::F64Abs => self.run_abs::<F64>(),
            &isa::Instruction::F64Neg => self.run_neg::<F64>(),
            &isa::Instruction::F64Ceil => self.run_ceil::<F64>(),
            &isa::Instruction::F64Floor => self.run_floor::<F64>(),
            &isa::Instruction::F64Trunc => self.run_trunc::<F64>(),
            &isa::Instruction::F64Nearest => self.run_nearest::<F64>(),
            &isa::Instruction::F64Sqrt => self.run_sqrt::<F64>(),
            &isa::Instruction::F64Add => self.run_add::<F64>(),
            &isa::Instruction::F64Sub => self.run_sub::<F64>(),
            &isa::Instruction::F64Mul => self.run_mul::<F64>(),
            &isa::Instruction::F64Div => self.run_div::<F64, F64>(),
            &isa::Instruction::F64Min => self.run_min::<F64>(),
            &isa::Instruction::F64Max => self.run_max::<F64>(),
            &isa::Instruction::F64Copysign => self.run_copysign::<F64>(),

            &isa::Instruction::I32WrapI64 => self.run_wrap::<i64, i32>(),
            &isa::Instruction::I32TruncSF32 => self.run_trunc_to_int::<F32, i32, i32>(),
            &isa::Instruction::I32TruncUF32 => self.run_trunc_to_int::<F32, u32, i32>(),
            &isa::Instruction::I32TruncSF64 => self.run_trunc_to_int::<F64, i32, i32>(),
            &isa::Instruction::I32TruncUF64 => self.run_trunc_to_int::<F64, u32, i32>(),
            &isa::Instruction::I64ExtendSI32 => self.run_extend::<i32, i64, i64>(),
            &isa::Instruction::I64ExtendUI32 => self.run_extend::<u32, u64, i64>(),
            &isa::Instruction::I64TruncSF32 => self.run_trunc_to_int::<F32, i64, i64>(),
            &isa::Instruction::I64TruncUF32 => self.run_trunc_to_int::<F32, u64, i64>(),
            &isa::Instruction::I64TruncSF64 => self.run_trunc_to_int::<F64, i64, i64>(),
            &isa::Instruction::I64TruncUF64 => self.run_trunc_to_int::<F64, u64, i64>(),
            &isa::Instruction::F32ConvertSI32 => self.run_extend::<i32, F32, F32>(),
            &isa::Instruction::F32ConvertUI32 => self.run_extend::<u32, F32, F32>(),
            &isa::Instruction::F32ConvertSI64 => self.run_wrap::<i64, F32>(),
            &isa::Instruction::F32ConvertUI64 => self.run_wrap::<u64, F32>(),
            &isa::Instruction::F32DemoteF64 => self.run_wrap::<F64, F32>(),
            &isa::Instruction::F64ConvertSI32 => self.run_extend::<i32, F64, F64>(),
            &isa::Instruction::F64ConvertUI32 => self.run_extend::<u32, F64, F64>(),
            &isa::Instruction::F64ConvertSI64 => self.run_extend::<i64, F64, F64>(),
            &isa::Instruction::F64ConvertUI64 => self.run_extend::<u64, F64, F64>(),
            &isa::Instruction::F64PromoteF32 => self.run_extend::<F32, F64, F64>(),

            &isa::Instruction::I32ReinterpretF32 => self.run_reinterpret::<F32, i32>(),
            &isa::Instruction::I64ReinterpretF64 => self.run_reinterpret::<F64, i64>(),
            &isa::Instruction::F32ReinterpretI32 => self.run_reinterpret::<i32, F32>(),
            &isa::Instruction::F64ReinterpretI64 => self.run_reinterpret::<i64, F64>(),
        }
    }

    fn run_unreachable(
        &mut self,
        _context: &mut FunctionContext,
    ) -> Result<InstructionOutcome, TrapKind> {
        Err(TrapKind::Unreachable)
    }

    fn run_br(
        &mut self,
        _context: &mut FunctionContext,
        target: isa::Target,
    ) -> Result<InstructionOutcome, TrapKind> {
        Ok(InstructionOutcome::Branch(target))
    }

    fn run_br_nez(&mut self, target: isa::Target) -> Result<InstructionOutcome, TrapKind> {
        let condition = self.value_stack.pop_as();
        if condition {
            Ok(InstructionOutcome::Branch(target))
        } else {
            Ok(InstructionOutcome::RunNextInstruction)
        }
    }

    fn run_br_eqz(&mut self, target: isa::Target) -> Result<InstructionOutcome, TrapKind> {
        let condition = self.value_stack.pop_as();
        if condition {
            Ok(InstructionOutcome::RunNextInstruction)
        } else {
            Ok(InstructionOutcome::Branch(target))
        }
    }
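    /// `br_table` pops the branch selector from the value stack; per wasm
    /// semantics an out-of-range selector resolves to the table's default
    /// target, which `BrTargets::get` takes care of.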
    fn run_br_table(&mut self, targets: isa::BrTargets) -> Result<InstructionOutcome, TrapKind> {
        let index: u32 = self.value_stack.pop_as();
        let dst = targets.get(index);
        Ok(InstructionOutcome::Branch(dst))
    }

    fn run_return(&mut self, drop_keep: isa::DropKeep) -> Result<InstructionOutcome, TrapKind> {
        Ok(InstructionOutcome::Return(drop_keep))
    }

    fn run_call(
        &mut self,
        context: &mut FunctionContext,
        func_idx: u32,
    ) -> Result<InstructionOutcome, TrapKind> {
        let func = context
            .module()
            .func_by_index(func_idx)
            .expect("Due to validation func should exist");
        Ok(InstructionOutcome::ExecuteCall(func))
    }

    fn run_call_indirect(
        &mut self,
        context: &mut FunctionContext,
        signature_idx: u32,
    ) -> Result<InstructionOutcome, TrapKind> {
        let table_func_idx: u32 = self.value_stack.pop_as();
        let table = context
            .module()
            .table_by_index(DEFAULT_TABLE_INDEX)
            .expect("Due to validation table should exist");
        let func_ref = table
            .get(table_func_idx)
            .map_err(|_| TrapKind::TableAccessOutOfBounds)?
            .ok_or_else(|| TrapKind::ElemUninitialized)?;

        {
            let actual_function_type = func_ref.signature();
            let required_function_type = context
                .module()
                .signature_by_index(signature_idx)
                .expect("Due to validation type should exist");

            if &*required_function_type != actual_function_type {
                return Err(TrapKind::UnexpectedSignature);
            }
        }

        Ok(InstructionOutcome::ExecuteCall(func_ref))
    }

    fn run_drop(&mut self) -> Result<InstructionOutcome, TrapKind> {
        let _ = self.value_stack.pop();
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_select(&mut self) -> Result<InstructionOutcome, TrapKind> {
        let (left, mid, right) = self.value_stack.pop_triple();

        let condition = <_>::from_runtime_value_internal(right);
        let val = if condition { left } else { mid };
        self.value_stack.push(val)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_get_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapKind> {
        let val = *self.value_stack.pick_mut(index as usize);
        self.value_stack.push(val)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_set_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapKind> {
        let val = self.value_stack.pop();
        *self.value_stack.pick_mut(index as usize) = val;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_tee_local(&mut self, index: u32) -> Result<InstructionOutcome, TrapKind> {
        let val = self.value_stack.top().clone();
        *self.value_stack.pick_mut(index as usize) = val;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_get_global(
        &mut self,
        context: &mut FunctionContext,
        index: u32,
    ) -> Result<InstructionOutcome, TrapKind> {
        let global = context
            .module()
            .global_by_index(index)
            .expect("Due to validation global should exist");
        let val = global.get();
        self.value_stack.push(val.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_set_global(
        &mut self,
        context: &mut FunctionContext,
        index: u32,
    ) -> Result<InstructionOutcome, TrapKind> {
        let val = self.value_stack.pop();
        let global = context
            .module()
            .global_by_index(index)
            .expect("Due to validation global should exist");
        global
            .set(val.with_type(global.value_type()))
            .expect("Due to validation set to a global should succeed");
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_load<T>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: LittleEndianConvert,
    {
        let raw_address = self.value_stack.pop_as();
        let address = effective_address(offset, raw_address)?;
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let n: T = m
            .get_value(address)
            .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
        self.value_stack.push(n.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }
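    /// Narrow loads (e.g. `i32.load8_s`) read a small value `T` from memory
    /// and sign- or zero-extend it into the stack type `U`; which of the two
    /// happens is determined by the signedness of `T` via `ExtendInto`.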
    fn run_load_extend<T, U>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapKind>
    where
        T: ExtendInto<U>,
        RuntimeValueInternal: From<U>,
        T: LittleEndianConvert,
    {
        let raw_address = self.value_stack.pop_as();
        let address = effective_address(offset, raw_address)?;
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let v: T = m
            .get_value(address)
            .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
        let stack_value: U = v.extend_into();
        self.value_stack
            .push(stack_value.into())
            .map_err(Into::into)
            .map(|_| InstructionOutcome::RunNextInstruction)
    }

    fn run_store<T>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal,
        T: LittleEndianConvert,
    {
        let stack_value = self.value_stack.pop_as::<T>();
        let raw_address = self.value_stack.pop_as::<u32>();
        let address = effective_address(offset, raw_address)?;

        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        m.set_value(address, stack_value)
            .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_store_wrap<T, U>(
        &mut self,
        context: &mut FunctionContext,
        offset: u32,
    ) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal,
        T: WrapInto<U>,
        U: LittleEndianConvert,
    {
        let stack_value: T = <_>::from_runtime_value_internal(self.value_stack.pop());
        let stack_value = stack_value.wrap_into();
        let raw_address = self.value_stack.pop_as::<u32>();
        let address = effective_address(offset, raw_address)?;
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        m.set_value(address, stack_value)
            .map_err(|_| TrapKind::MemoryAccessOutOfBounds)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_current_memory(
        &mut self,
        context: &mut FunctionContext,
    ) -> Result<InstructionOutcome, TrapKind> {
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let s = m.current_size().0;
        self.value_stack.push(RuntimeValueInternal(s as _))?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_grow_memory(
        &mut self,
        context: &mut FunctionContext,
    ) -> Result<InstructionOutcome, TrapKind> {
        let pages: u32 = self.value_stack.pop_as();
        let m = context
            .memory()
            .expect("Due to validation memory should exist");
        let m = match m.grow(Pages(pages as usize)) {
            Ok(Pages(new_size)) => new_size as u32,
            Err(_) => u32::MAX, // Returns -1 (or 0xFFFFFFFF) in case of error.
        };
        self.value_stack.push(RuntimeValueInternal(m as _))?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_const(&mut self, val: RuntimeValue) -> Result<InstructionOutcome, TrapKind> {
        self.value_stack
            .push(val.into())
            .map_err(Into::into)
            .map(|_| InstructionOutcome::RunNextInstruction)
    }
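    /// Shared driver for all comparison instructions: pops two operands of
    /// type `T` and pushes `1` if the predicate holds, `0` otherwise.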
    fn run_relop<T, F>(&mut self, f: F) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal,
        F: FnOnce(T, T) -> bool,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = if f(left, right) {
            RuntimeValueInternal(1)
        } else {
            RuntimeValueInternal(0)
        };
        self.value_stack.push(v)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_eqz<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal,
        T: PartialEq<T> + Default,
    {
        let v = self.value_stack.pop_as::<T>();
        let v = RuntimeValueInternal(if v == Default::default() { 1 } else { 0 });
        self.value_stack.push(v)?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_eq<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal + PartialEq<T>,
    {
        self.run_relop(|left: T, right: T| left == right)
    }

    fn run_ne<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal + PartialEq<T>,
    {
        self.run_relop(|left: T, right: T| left != right)
    }

    fn run_lt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left < right)
    }

    fn run_gt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left > right)
    }

    fn run_lte<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left <= right)
    }

    fn run_gte<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        T: FromRuntimeValueInternal + PartialOrd<T>,
    {
        self.run_relop(|left: T, right: T| left >= right)
    }

    fn run_unop<T, U, F>(&mut self, f: F) -> Result<InstructionOutcome, TrapKind>
    where
        F: FnOnce(T) -> U,
        T: FromRuntimeValueInternal,
        RuntimeValueInternal: From<U>,
    {
        let v = self.value_stack.pop_as::<T>();
        let v = f(v);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_clz<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Integer<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.leading_zeros())
    }

    fn run_ctz<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Integer<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.trailing_zeros())
    }

    fn run_popcnt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Integer<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.count_ones())
    }

    fn run_add<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: ArithmeticOps<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.add(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_sub<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: ArithmeticOps<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.sub(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_mul<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: ArithmeticOps<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.mul(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }
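    /// Division is generic over two types: `T` is the value as it lives on
    /// the stack and `U` is the signedness actually used for the operation
    /// (e.g. `i32`/`u32` for `i32.div_s`/`i32.div_u`); `div` traps on
    /// division by zero and on signed overflow.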
    fn run_div<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: TransmuteInto<U> + FromRuntimeValueInternal,
        U: ArithmeticOps<U> + TransmuteInto<T>,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let (left, right) = (left.transmute_into(), right.transmute_into());
        let v = left.div(right)?;
        let v = v.transmute_into();
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_rem<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: TransmuteInto<U> + FromRuntimeValueInternal,
        U: Integer<U> + TransmuteInto<T>,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let (left, right) = (left.transmute_into(), right.transmute_into());
        let v = left.rem(right)?;
        let v = v.transmute_into();
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_and<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<<T as ops::BitAnd>::Output>,
        T: ops::BitAnd<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.bitand(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_or<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<<T as ops::BitOr>::Output>,
        T: ops::BitOr<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.bitor(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_xor<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<<T as ops::BitXor>::Output>,
        T: ops::BitXor<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.bitxor(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_shl<T>(&mut self, mask: T) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<<T as ops::Shl<T>>::Output>,
        T: ops::Shl<T> + ops::BitAnd<T, Output = T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.shl(right & mask);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_shr<T, U>(&mut self, mask: U) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: TransmuteInto<U> + FromRuntimeValueInternal,
        U: ops::Shr<U> + ops::BitAnd<U, Output = U>,
        <U as ops::Shr<U>>::Output: TransmuteInto<T>,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let (left, right) = (left.transmute_into(), right.transmute_into());
        let v = left.shr(right & mask);
        let v = v.transmute_into();
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_rotl<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Integer<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.rotl(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_rotr<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Integer<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.rotr(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_abs<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.abs())
    }

    fn run_neg<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<<T as ops::Neg>::Output>,
        T: ops::Neg + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.neg())
    }

    fn run_ceil<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.ceil())
    }

    fn run_floor<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.floor())
    }

    fn run_trunc<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.trunc())
    }

    fn run_nearest<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.nearest())
    }

    fn run_sqrt<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.sqrt())
    }
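    /// `min`/`max`/`copysign` follow wasm float semantics via the `Float`
    /// trait; in particular `min` and `max` return NaN if either operand is
    /// NaN, unlike Rust's `f32::min`/`f32::max`.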
    fn run_min<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.min(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_max<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.max(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_copysign<T>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<T>,
        T: Float<T> + FromRuntimeValueInternal,
    {
        let (left, right) = self.value_stack.pop_pair_as::<T>();
        let v = left.copysign(right);
        self.value_stack.push(v.into())?;
        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_wrap<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<U>,
        T: WrapInto<U> + FromRuntimeValueInternal,
    {
        self.run_unop(|v: T| v.wrap_into())
    }

    fn run_trunc_to_int<T, U, V>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<V>,
        T: TryTruncateInto<U, TrapKind> + FromRuntimeValueInternal,
        U: TransmuteInto<V>,
    {
        let v = self.value_stack.pop_as::<T>();

        v.try_truncate_into()
            .map(|v| v.transmute_into())
            .map(|v| self.value_stack.push(v.into()))
            .map(|_| InstructionOutcome::RunNextInstruction)
    }

    fn run_extend<T, U, V>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<V>,
        T: ExtendInto<U> + FromRuntimeValueInternal,
        U: TransmuteInto<V>,
    {
        let v = self.value_stack.pop_as::<T>();

        let v = v.extend_into().transmute_into();
        self.value_stack.push(v.into())?;

        Ok(InstructionOutcome::RunNextInstruction)
    }

    fn run_reinterpret<T, U>(&mut self) -> Result<InstructionOutcome, TrapKind>
    where
        RuntimeValueInternal: From<U>,
        T: FromRuntimeValueInternal,
        T: TransmuteInto<U>,
    {
        let v = self.value_stack.pop_as::<T>();

        let v = v.transmute_into();
        self.value_stack.push(v.into())?;

        Ok(InstructionOutcome::RunNextInstruction)
    }
}
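// A minimal sketch of the `effective_address` overflow behaviour below,
// assuming the standard `cargo test` harness (the module name is
// illustrative): the static offset plus the dynamic address must trap
// instead of wrapping around.
#[cfg(test)]
mod effective_address_overflow {
    use super::*;

    #[test]
    fn overflowing_address_traps() {
        assert_eq!(effective_address(4, 8).unwrap(), 12);
        // `u32::MAX + 1` would wrap; `checked_add` turns it into a trap.
        assert!(effective_address(u32::MAX, 1).is_err());
    }
}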
/// Function execution context.
struct FunctionContext {
    /// Is context initialized.
    pub is_initialized: bool,
    /// Internal function reference.
    pub function: FuncRef,
    pub module: ModuleRef,
    pub memory: Option<MemoryRef>,
    /// Current instruction position.
    pub position: u32,
}

impl FunctionContext {
    pub fn new(function: FuncRef) -> Self {
        let module = match function.as_internal() {
            FuncInstanceInternal::Internal { module, .. } => {
                module.upgrade().expect("module deallocated")
            }
            FuncInstanceInternal::Host { .. } => panic!(
                "Host functions can't be called as internally defined functions; \
                 Thus FunctionContext can be created only with internally defined functions; qed"
            ),
        };
        let memory = module.memory_by_index(DEFAULT_MEMORY_INDEX);
        FunctionContext {
            is_initialized: false,
            function: function,
            module: ModuleRef(module),
            memory: memory,
            position: 0,
        }
    }

    pub fn is_initialized(&self) -> bool {
        self.is_initialized
    }

    pub fn initialize(
        &mut self,
        locals: &[Local],
        value_stack: &mut ValueStack,
    ) -> Result<(), TrapKind> {
        debug_assert!(!self.is_initialized);

        let num_locals = locals.iter().map(|l| l.count() as usize).sum();
        let locals = vec![Default::default(); num_locals];

        // TODO: Replace with extend.
        for local in locals {
            value_stack
                .push(local)
                .map_err(|_| TrapKind::StackOverflow)?;
        }

        self.is_initialized = true;
        Ok(())
    }

    pub fn module(&self) -> ModuleRef {
        self.module.clone()
    }

    pub fn memory(&self) -> Option<&MemoryRef> {
        self.memory.as_ref()
    }
}

impl fmt::Debug for FunctionContext {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "FunctionContext")
    }
}

fn effective_address(address: u32, offset: u32) -> Result<u32, TrapKind> {
    match offset.checked_add(address) {
        None => Err(TrapKind::MemoryAccessOutOfBounds),
        Some(address) => Ok(address),
    }
}

fn prepare_function_args(
    signature: &Signature,
    caller_stack: &mut ValueStack,
) -> Vec<RuntimeValue> {
    let mut out = signature
        .params()
        .iter()
        .rev()
        .map(|&param_ty| caller_stack.pop().with_type(param_ty))
        .collect::<Vec<RuntimeValue>>();
    out.reverse();
    out
}

pub fn check_function_args(signature: &Signature, args: &[RuntimeValue]) -> Result<(), Trap> {
    if signature.params().len() != args.len() {
        return Err(TrapKind::UnexpectedSignature.into());
    }

    if signature
        .params()
        .iter()
        .zip(args)
        .any(|(expected_type, param_value)| {
            let actual_type = param_value.value_type();
            &actual_type != expected_type
        })
    {
        return Err(TrapKind::UnexpectedSignature.into());
    }

    Ok(())
}

#[derive(Debug)]
struct ValueStack {
    buf: Box<[RuntimeValueInternal]>,
    /// Index of the first free place in the stack.
    sp: usize,
}

impl ValueStack {
    fn with_limit(limit: usize) -> ValueStack {
        let mut buf = Vec::new();
        buf.resize(limit, RuntimeValueInternal(0));

        ValueStack {
            buf: buf.into_boxed_slice(),
            sp: 0,
        }
    }

    #[inline]
    fn drop_keep(&mut self, drop_keep: isa::DropKeep) {
        if drop_keep.keep == isa::Keep::Single {
            let top = *self.top();
            *self.pick_mut(drop_keep.drop as usize + 1) = top;
        }

        let cur_stack_len = self.len();
        self.sp = cur_stack_len - drop_keep.drop as usize;
    }

    #[inline]
    fn pop_as<T>(&mut self) -> T
    where
        T: FromRuntimeValueInternal,
    {
        let value = self.pop();
        T::from_runtime_value_internal(value)
    }

    #[inline]
    fn pop_pair_as<T>(&mut self) -> (T, T)
    where
        T: FromRuntimeValueInternal,
    {
        let right = self.pop_as();
        let left = self.pop_as();
        (left, right)
    }

    #[inline]
    fn pop_triple(
        &mut self,
    ) -> (
        RuntimeValueInternal,
        RuntimeValueInternal,
        RuntimeValueInternal,
    ) {
        let right = self.pop();
        let mid = self.pop();
        let left = self.pop();
        (left, mid, right)
    }

    #[inline]
    fn top(&self) -> &RuntimeValueInternal {
        self.pick(1)
    }

    fn pick(&self, depth: usize) -> &RuntimeValueInternal {
        &self.buf[self.sp - depth]
    }

    #[inline]
    fn pick_mut(&mut self, depth: usize) -> &mut RuntimeValueInternal {
        &mut self.buf[self.sp - depth]
    }

    #[inline]
    fn pop(&mut self) -> RuntimeValueInternal {
        self.sp -= 1;
        self.buf[self.sp]
    }

    #[inline]
    fn push(&mut self, value: RuntimeValueInternal) -> Result<(), TrapKind> {
        let cell = self
            .buf
            .get_mut(self.sp)
            .ok_or_else(|| TrapKind::StackOverflow)?;
        *cell = value;
        self.sp += 1;
        Ok(())
    }

    #[inline]
    fn len(&self) -> usize {
        self.sp
    }
}
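// A minimal sketch of `DropKeep` handling, assuming the standard `cargo test`
// harness (the module name is illustrative): a branch out of a block drops
// the block's stack entries while optionally keeping the top value, which is
// how block results survive a branch.
#[cfg(test)]
mod value_stack_drop_keep {
    use super::*;

    #[test]
    fn keep_single_preserves_top_across_drop() {
        let mut stack = ValueStack::with_limit(16);
        for v in 1..=4u64 {
            stack.push(RuntimeValueInternal(v)).unwrap();
        }
        // Drop the two entries below the top, but keep the top itself.
        stack.drop_keep(isa::DropKeep {
            drop: 2,
            keep: isa::Keep::Single,
        });
        assert_eq!(stack.len(), 2);
        assert_eq!(*stack.top(), RuntimeValueInternal(4));
    }
}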