Compare commits

...

9 Commits

Author SHA1 Message Date
Michael Mueller c96915b4df
Replace Mutex with RwLock 2019-06-20 08:24:57 +02:00
Michael Mueller b1bd7950d9
Fix panic message 2019-06-19 20:32:57 +02:00
Michael Mueller 70a2e612bc
Satisfy cargo fmt style remarks 2019-06-19 18:44:26 +02:00
Michael Mueller 81f34a6ab6
Bump version 2019-06-19 18:20:24 +02:00
Michael Mueller 91684c25ba
Test thread-safety in CI 2019-06-19 18:11:06 +02:00
Michael Mueller d08a08de51
Update Readme 2019-06-19 18:11:06 +02:00
Michael Mueller f111950cbd
Exclude deadlocking test in thread-safe mode 2019-06-19 18:11:06 +02:00
Michael Mueller 99b0e03e4e
Add feature flag for opt-in thread-safety
The `atomic` dependency is necessary because Rust's Atomic type doesn't support generic inner values.
2019-06-19 18:11:03 +02:00
Michael Mueller 7546d3026d
Move buffer param into function
Needed for thread-safe mode; otherwise there are borrow errors from trying to borrow immutably while already borrowed mutably.
2019-06-19 18:06:50 +02:00
11 changed files with 182 additions and 119 deletions

View File

@ -1,6 +1,6 @@
[package]
name = "wasmi"
version = "0.4.5"
version = "0.4.6"
authors = ["Nikolay Volf <nikvolf@gmail.com>", "Svyatoslav Nikolsky <svyatonik@yandex.ru>", "Sergey Pepyakin <s.pepyakin@gmail.com>"]
license = "MIT/Apache-2.0"
readme = "README.md"
@ -17,6 +17,7 @@ memory_units = "0.3.0"
libm = { version = "0.1.2", optional = true }
num-rational = "0.2.2"
num-traits = "0.2.8"
atomic = { version = "0.4", optional = true }
[dev-dependencies]
assert_matches = "1.1"
@ -24,6 +25,7 @@ rand = "0.4.2"
wabt = "0.6"
[features]
threadsafe = ["atomic"]
default = ["std"]
# Disable for no_std support
std = [
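
The new optional `atomic` dependency above is needed because the standard library's atomics are tied to concrete primitive types, while the interpreter wants to store arbitrary `Copy` values (such as a `RuntimeValue`) atomically. A small standalone sketch of what `Atomic<T>` from the `atomic` crate provides; the `Flag` type is a made-up stand-in:

```rust
// core::sync::atomic only offers concrete types such as AtomicUsize and
// AtomicBool, whereas atomic::Atomic<T> accepts any T: Copy (with a
// lock-based fallback for sizes that have no native atomic support).
extern crate atomic;

use atomic::{Atomic, Ordering};

// Hypothetical Copy payload standing in for something like a RuntimeValue.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Flag {
    Off,
    On(u32),
}

fn main() {
    let cell = Atomic::new(Flag::Off);
    cell.store(Flag::On(7), Ordering::Relaxed);
    assert_eq!(cell.load(Ordering::Relaxed), Flag::On(7));
}
```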

View File

@ -18,6 +18,7 @@ git clone https://github.com/paritytech/wasmi.git --recursive
cd wasmi
cargo build
cargo test
cargo test --features threadsafe
```
# `no_std` support
@ -39,6 +40,19 @@ Also, code related to `std::error` is disabled.
Floating point operations in `no_std` use [`libm`](https://crates.io/crates/libm), which sometimes panics in debug mode (https://github.com/japaric/libm/issues/4).
So make sure to either use release builds or avoid WASM with floating point operations, for example by using [`deny_floating_point`](https://docs.rs/wasmi/0.4.0/wasmi/struct.Module.html#method.deny_floating_point).
# Thread-safe support
This crate supports thread-safe environments.
Enable the `threadsafe` feature to switch the internal data structures to their thread-safe counterparts (e.g. `Arc` and `RwLock` instead of `Rc` and `RefCell`).
```toml
[dependencies]
wasmi = { version = "0.4.6", features = ["threadsafe"] }
```
# License
`wasmi` is primarily distributed under the terms of both the MIT

View File

@ -1,6 +1,5 @@
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use alloc::rc::{Rc, Weak};
use core::fmt;
use host::Externals;
use isa;
@ -17,7 +16,7 @@ use {Signature, Trap};
///
/// [`FuncInstance`]: struct.FuncInstance.html
#[derive(Clone, Debug)]
pub struct FuncRef(Rc<FuncInstance>);
pub struct FuncRef(::MyRc<FuncInstance>);
impl ::core::ops::Deref for FuncRef {
type Target = FuncInstance;
@ -45,9 +44,9 @@ pub struct FuncInstance(FuncInstanceInternal);
#[derive(Clone)]
pub(crate) enum FuncInstanceInternal {
Internal {
signature: Rc<Signature>,
module: Weak<ModuleInstance>,
body: Rc<FuncBody>,
signature: ::MyRc<Signature>,
module: ::MyWeak<ModuleInstance>,
body: ::MyRc<FuncBody>,
},
Host {
signature: Signature,
@ -84,7 +83,7 @@ impl FuncInstance {
signature,
host_func_index,
};
FuncRef(Rc::new(FuncInstance(func)))
FuncRef(::MyRc::new(FuncInstance(func)))
}
/// Returns [signature] of this function instance.
@ -104,21 +103,21 @@ impl FuncInstance {
}
pub(crate) fn alloc_internal(
module: Weak<ModuleInstance>,
signature: Rc<Signature>,
module: ::MyWeak<ModuleInstance>,
signature: ::MyRc<Signature>,
body: FuncBody,
) -> FuncRef {
let func = FuncInstanceInternal::Internal {
signature,
module: module,
body: Rc::new(body),
body: ::MyRc::new(body),
};
FuncRef(Rc::new(FuncInstance(func)))
FuncRef(::MyRc::new(FuncInstance(func)))
}
pub(crate) fn body(&self) -> Option<Rc<FuncBody>> {
pub(crate) fn body(&self) -> Option<::MyRc<FuncBody>> {
match *self.as_internal() {
FuncInstanceInternal::Internal { ref body, .. } => Some(Rc::clone(body)),
FuncInstanceInternal::Internal { ref body, .. } => Some(::MyRc::clone(body)),
FuncInstanceInternal::Host { .. } => None,
}
}

View File

@ -1,5 +1,3 @@
use alloc::rc::Rc;
use core::cell::Cell;
use parity_wasm::elements::ValueType as EValueType;
use types::ValueType;
use value::RuntimeValue;
@ -11,7 +9,7 @@ use Error;
///
/// [`GlobalInstance`]: struct.GlobalInstance.html
#[derive(Clone, Debug)]
pub struct GlobalRef(Rc<GlobalInstance>);
pub struct GlobalRef(::MyRc<GlobalInstance>);
impl ::core::ops::Deref for GlobalRef {
type Target = GlobalInstance;
@ -33,7 +31,7 @@ impl ::core::ops::Deref for GlobalRef {
/// [`I64`]: enum.RuntimeValue.html#variant.I64
#[derive(Debug)]
pub struct GlobalInstance {
val: Cell<RuntimeValue>,
val: ::MyCell<RuntimeValue>,
mutable: bool,
}
@ -43,8 +41,8 @@ impl GlobalInstance {
/// Since it is possible to export only immutable globals,
/// users likely want to set `mutable` to `false`.
pub fn alloc(val: RuntimeValue, mutable: bool) -> GlobalRef {
GlobalRef(Rc::new(GlobalInstance {
val: Cell::new(val),
GlobalRef(::MyRc::new(GlobalInstance {
val: ::MyCell::new(val),
mutable,
}))
}

View File

@ -396,6 +396,12 @@ mod table;
mod types;
mod value;
#[cfg(feature = "threadsafe")]
mod threadsafe;
#[cfg(not(feature = "threadsafe"))]
mod not_threadsafe;
#[cfg(test)]
mod tests;
@ -410,6 +416,12 @@ pub use self::table::{TableInstance, TableRef};
pub use self::types::{GlobalDescriptor, MemoryDescriptor, Signature, TableDescriptor, ValueType};
pub use self::value::{Error as ValueError, FromRuntimeValue, LittleEndianConvert, RuntimeValue};
#[cfg(feature = "threadsafe")]
pub use self::threadsafe::*;
#[cfg(not(feature = "threadsafe"))]
pub use self::not_threadsafe::*;
/// WebAssembly-specific sizes and units.
pub mod memory_units {
pub use memory_units_crate::wasm32::*;

View File

@ -1,12 +1,6 @@
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use alloc::rc::Rc;
use core::{
cell::{Cell, RefCell},
cmp, fmt,
ops::Range,
u32,
};
use core::{cmp, fmt, ops::Range, u32};
use memory_units::{Bytes, Pages, RoundUpTo};
use parity_wasm::elements::ResizableLimits;
use value::LittleEndianConvert;
@ -26,7 +20,7 @@ pub const LINEAR_MEMORY_PAGE_SIZE: Bytes = Bytes(65536);
/// [`MemoryInstance`]: struct.MemoryInstance.html
///
#[derive(Clone, Debug)]
pub struct MemoryRef(Rc<MemoryInstance>);
pub struct MemoryRef(::MyRc<MemoryInstance>);
impl ::core::ops::Deref for MemoryRef {
type Target = MemoryInstance;
@ -52,11 +46,11 @@ pub struct MemoryInstance {
/// Memory limits.
limits: ResizableLimits,
/// Linear memory buffer with lazy allocation.
buffer: RefCell<Vec<u8>>,
buffer: ::MyRefCell<Vec<u8>>,
initial: Pages,
current_size: Cell<usize>,
current_size: ::MyCell<usize>,
maximum: Option<Pages>,
lowest_used: Cell<u32>,
lowest_used: ::MyCell<u32>,
}
impl fmt::Debug for MemoryInstance {
@ -127,7 +121,7 @@ impl MemoryInstance {
}
let memory = MemoryInstance::new(initial, maximum);
Ok(MemoryRef(Rc::new(memory)))
Ok(MemoryRef(::MyRc::new(memory)))
}
/// Create new linear memory instance.
@ -137,11 +131,11 @@ impl MemoryInstance {
let initial_size: Bytes = initial.into();
MemoryInstance {
limits: limits,
buffer: RefCell::new(Vec::with_capacity(4096)),
buffer: ::MyRefCell::new(Vec::with_capacity(4096)),
initial: initial,
current_size: Cell::new(initial_size.0),
current_size: ::MyCell::new(initial_size.0),
maximum: maximum,
lowest_used: Cell::new(u32::max_value()),
lowest_used: ::MyCell::new(u32::max_value()),
}
}
@ -204,9 +198,9 @@ impl MemoryInstance {
/// Get value from memory at given offset.
pub fn get_value<T: LittleEndianConvert>(&self, offset: u32) -> Result<T, Error> {
let mut buffer = self.buffer.borrow_mut();
let region =
self.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?;
let region = self.checked_region(offset as usize, ::core::mem::size_of::<T>())?;
let buffer = self.buffer.borrow();
Ok(T::from_little_endian(&buffer[region.range()]).expect("Slice size is checked"))
}
@ -217,9 +211,9 @@ impl MemoryInstance {
///
/// [`get_into`]: #method.get_into
pub fn get(&self, offset: u32, size: usize) -> Result<Vec<u8>, Error> {
let mut buffer = self.buffer.borrow_mut();
let region = self.checked_region(&mut buffer, offset as usize, size)?;
let region = self.checked_region(offset as usize, size)?;
let buffer = self.buffer.borrow();
Ok(buffer[region.range()].to_vec())
}
@ -229,9 +223,9 @@ impl MemoryInstance {
///
/// Returns `Err` if the specified region is out of bounds.
pub fn get_into(&self, offset: u32, target: &mut [u8]) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut();
let region = self.checked_region(&mut buffer, offset as usize, target.len())?;
let region = self.checked_region(offset as usize, target.len())?;
let buffer = self.buffer.borrow();
target.copy_from_slice(&buffer[region.range()]);
Ok(())
@ -239,14 +233,12 @@ impl MemoryInstance {
/// Copy data in the memory at given offset.
pub fn set(&self, offset: u32, value: &[u8]) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut();
let range = self
.checked_region(&mut buffer, offset as usize, value.len())?
.range();
let range = self.checked_region(offset as usize, value.len())?.range();
if offset < self.lowest_used.get() {
self.lowest_used.set(offset);
}
let mut buffer = self.buffer.borrow_mut();
buffer[range].copy_from_slice(value);
Ok(())
@ -254,13 +246,13 @@ impl MemoryInstance {
/// Copy value in the memory at given offset.
pub fn set_value<T: LittleEndianConvert>(&self, offset: u32, value: T) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut();
let range = self
.checked_region(&mut buffer, offset as usize, ::core::mem::size_of::<T>())?
.checked_region(offset as usize, ::core::mem::size_of::<T>())?
.range();
if offset < self.lowest_used.get() {
self.lowest_used.set(offset);
}
let mut buffer = self.buffer.borrow_mut();
value.into_little_endian(&mut buffer[range]);
Ok(())
}
@ -299,15 +291,8 @@ impl MemoryInstance {
Ok(size_before_grow)
}
fn checked_region<B>(
&self,
buffer: &mut B,
offset: usize,
size: usize,
) -> Result<CheckedRegion, Error>
where
B: ::core::ops::DerefMut<Target = Vec<u8>>,
{
fn checked_region(&self, offset: usize, size: usize) -> Result<CheckedRegion, Error> {
let mut buffer = self.buffer.borrow_mut();
let end = offset.checked_add(size).ok_or_else(|| {
Error::Memory(format!(
"trying to access memory block of size {} from offset {}",
@ -334,17 +319,13 @@ impl MemoryInstance {
})
}
fn checked_region_pair<B>(
fn checked_region_pair(
&self,
buffer: &mut B,
offset1: usize,
size1: usize,
offset2: usize,
size2: usize,
) -> Result<(CheckedRegion, CheckedRegion), Error>
where
B: ::core::ops::DerefMut<Target = Vec<u8>>,
{
) -> Result<(CheckedRegion, CheckedRegion), Error> {
let end1 = offset1.checked_add(size1).ok_or_else(|| {
Error::Memory(format!(
"trying to access memory block of size {} from offset {}",
@ -359,6 +340,7 @@ impl MemoryInstance {
))
})?;
let mut buffer = self.buffer.borrow_mut();
let max = cmp::max(end1, end2);
if max <= self.current_size.get() && buffer.len() < max {
buffer.resize(max, 0);
@ -402,15 +384,14 @@ impl MemoryInstance {
///
/// Returns `Err` if either of specified regions is out of bounds.
pub fn copy(&self, src_offset: usize, dst_offset: usize, len: usize) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut();
let (read_region, write_region) =
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
self.checked_region_pair(src_offset, len, dst_offset, len)?;
if dst_offset < self.lowest_used.get() as usize {
self.lowest_used.set(dst_offset as u32);
}
let mut buffer = self.buffer.borrow_mut();
unsafe {
::core::ptr::copy(
buffer[read_region.range()].as_ptr(),
@ -439,10 +420,8 @@ impl MemoryInstance {
dst_offset: usize,
len: usize,
) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut();
let (read_region, write_region) =
self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
self.checked_region_pair(src_offset, len, dst_offset, len)?;
if read_region.intersects(&write_region) {
return Err(Error::Memory(format!(
@ -454,6 +433,7 @@ impl MemoryInstance {
self.lowest_used.set(dst_offset as u32);
}
let mut buffer = self.buffer.borrow_mut();
unsafe {
::core::ptr::copy_nonoverlapping(
buffer[read_region.range()].as_ptr(),
@ -475,28 +455,21 @@ impl MemoryInstance {
dst_offset: usize,
len: usize,
) -> Result<(), Error> {
if Rc::ptr_eq(&src.0, &dst.0) {
if ::MyRc::ptr_eq(&src.0, &dst.0) {
// `transfer` is invoked with the same source and destination. Let's assume that regions may
// overlap and use `copy`.
return src.copy(src_offset, dst_offset, len);
}
// Because memory references point to different memory instances, it is safe to `borrow_mut`
// both buffers at once (modulo `with_direct_access_mut`).
let mut src_buffer = src.buffer.borrow_mut();
let mut dst_buffer = dst.buffer.borrow_mut();
let src_range = src
.checked_region(&mut src_buffer, src_offset, len)?
.range();
let dst_range = dst
.checked_region(&mut dst_buffer, dst_offset, len)?
.range();
let src_range = src.checked_region(src_offset, len)?.range();
let dst_range = dst.checked_region(dst_offset, len)?.range();
if dst_offset < dst.lowest_used.get() as usize {
dst.lowest_used.set(dst_offset as u32);
}
let mut dst_buffer = dst.buffer.borrow_mut();
let src_buffer = src.buffer.borrow();
dst_buffer[dst_range].copy_from_slice(&src_buffer[src_range]);
Ok(())
@ -510,14 +483,13 @@ impl MemoryInstance {
///
/// Returns `Err` if the specified region is out of bounds.
pub fn clear(&self, offset: usize, new_val: u8, len: usize) -> Result<(), Error> {
let mut buffer = self.buffer.borrow_mut();
let range = self.checked_region(&mut buffer, offset, len)?.range();
let range = self.checked_region(offset, len)?.range();
if offset < self.lowest_used.get() as usize {
self.lowest_used.set(offset as u32);
}
let mut buffer = self.buffer.borrow_mut();
for val in &mut buffer[range] {
*val = new_val
}
@ -569,7 +541,6 @@ mod tests {
use super::{MemoryInstance, MemoryRef, LINEAR_MEMORY_PAGE_SIZE};
use memory_units::Pages;
use std::rc::Rc;
use Error;
#[test]
@ -671,8 +642,8 @@ mod tests {
#[test]
fn transfer_works() {
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
let dst = MemoryRef(Rc::new(create_memory(&[
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
let dst = MemoryRef(::MyRc::new(create_memory(&[
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
])));
@ -687,7 +658,7 @@ mod tests {
#[test]
fn transfer_still_works_with_same_memory() {
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
MemoryInstance::transfer(&src, 4, &src, 0, 3).unwrap();
@ -696,7 +667,7 @@ mod tests {
#[test]
fn transfer_oob_with_same_memory_errors() {
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
assert!(MemoryInstance::transfer(&src, 65535, &src, 0, 3).is_err());
// Check that memories content left untouched
@ -705,8 +676,8 @@ mod tests {
#[test]
fn transfer_oob_errors() {
let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
let dst = MemoryRef(Rc::new(create_memory(&[
let src = MemoryRef(::MyRc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
let dst = MemoryRef(::MyRc::new(create_memory(&[
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
])));
@ -756,6 +727,8 @@ mod tests {
});
}
// This test works only in the non-thread-safe variant; it deadlocks otherwise.
#[cfg(not(feature = "threadsafe"))]
#[should_panic]
#[test]
fn zero_copy_panics_on_nested_access() {
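
The recurring change in the diff above: `checked_region` no longer receives a pre-acquired mutable borrow of the buffer, but takes (and releases) its own borrow internally, and each caller re-borrows only for the actual read or write. That way only one borrow, or one lock guard in threadsafe mode, is ever held at a time. A simplified sketch of the pattern with a hypothetical `Mem` type (the real `checked_region` also grows the buffer lazily, which is why it takes a mutable borrow):

```rust
use std::cell::RefCell;
use std::ops::Range;

struct Mem {
    buffer: RefCell<Vec<u8>>,
}

impl Mem {
    // Takes its own short-lived borrow and drops it before returning, so no
    // borrow (or, in threadsafe mode, lock guard) outlives the bounds check.
    fn checked_region(&self, offset: usize, size: usize) -> Result<Range<usize>, String> {
        let buffer = self.buffer.borrow();
        let end = offset
            .checked_add(size)
            .ok_or_else(|| format!("offset {} + size {} overflows", offset, size))?;
        if end > buffer.len() {
            return Err(format!("region {}..{} is out of bounds", offset, end));
        }
        Ok(offset..end)
    }

    fn get(&self, offset: u32, size: usize) -> Result<Vec<u8>, String> {
        let range = self.checked_region(offset as usize, size)?; // borrow already released
        let buffer = self.buffer.borrow(); // fresh shared borrow for the copy
        Ok(buffer[range].to_vec())
    }
}

fn main() {
    let mem = Mem { buffer: RefCell::new(vec![1, 2, 3, 4]) };
    assert_eq!(mem.get(1, 2).unwrap(), vec![2, 3]);
    assert!(mem.get(3, 9).is_err());
}
```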

View File

@ -1,13 +1,10 @@
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use alloc::rc::Rc;
use core::cell::RefCell;
use core::fmt;
use Trap;
use alloc::collections::BTreeMap;
use core::cell::Ref;
use func::{FuncBody, FuncInstance, FuncRef};
use global::{GlobalInstance, GlobalRef};
use host::Externals;
@ -35,7 +32,7 @@ use {Error, MemoryInstance, Module, RuntimeValue, Signature, TableInstance};
///
/// [`ModuleInstance`]: struct.ModuleInstance.html
#[derive(Clone, Debug)]
pub struct ModuleRef(pub(crate) Rc<ModuleInstance>);
pub struct ModuleRef(pub(crate) ::MyRc<ModuleInstance>);
impl ::core::ops::Deref for ModuleRef {
type Target = ModuleInstance;
@ -154,23 +151,23 @@ impl ExternVal {
/// [`invoke_export`]: #method.invoke_export
#[derive(Debug)]
pub struct ModuleInstance {
signatures: RefCell<Vec<Rc<Signature>>>,
tables: RefCell<Vec<TableRef>>,
funcs: RefCell<Vec<FuncRef>>,
memories: RefCell<Vec<MemoryRef>>,
globals: RefCell<Vec<GlobalRef>>,
exports: RefCell<BTreeMap<String, ExternVal>>,
signatures: ::MyRefCell<Vec<::MyRc<Signature>>>,
tables: ::MyRefCell<Vec<TableRef>>,
funcs: ::MyRefCell<Vec<FuncRef>>,
memories: ::MyRefCell<Vec<MemoryRef>>,
globals: ::MyRefCell<Vec<GlobalRef>>,
exports: ::MyRefCell<BTreeMap<String, ExternVal>>,
}
impl ModuleInstance {
fn default() -> Self {
ModuleInstance {
funcs: RefCell::new(Vec::new()),
signatures: RefCell::new(Vec::new()),
tables: RefCell::new(Vec::new()),
memories: RefCell::new(Vec::new()),
globals: RefCell::new(Vec::new()),
exports: RefCell::new(BTreeMap::new()),
funcs: ::MyRefCell::new(Vec::new()),
signatures: ::MyRefCell::new(Vec::new()),
tables: ::MyRefCell::new(Vec::new()),
memories: ::MyRefCell::new(Vec::new()),
globals: ::MyRefCell::new(Vec::new()),
exports: ::MyRefCell::new(BTreeMap::new()),
}
}
@ -190,7 +187,7 @@ impl ModuleInstance {
self.funcs.borrow().get(idx as usize).cloned()
}
pub(crate) fn signature_by_index(&self, idx: u32) -> Option<Rc<Signature>> {
pub(crate) fn signature_by_index(&self, idx: u32) -> Option<::MyRc<Signature>> {
self.signatures.borrow().get(idx as usize).cloned()
}
@ -198,7 +195,7 @@ impl ModuleInstance {
self.funcs.borrow_mut().push(func);
}
fn push_signature(&self, signature: Rc<Signature>) {
fn push_signature(&self, signature: ::MyRc<Signature>) {
self.signatures.borrow_mut().push(signature)
}
@ -216,7 +213,7 @@ impl ModuleInstance {
/// Access all globals. This is a non-standard API so it's unlikely to be
/// portable to other engines.
pub fn globals<'a>(&self) -> Ref<Vec<GlobalRef>> {
pub fn globals<'a>(&self) -> ::MyRefRead<Vec<GlobalRef>> {
self.globals.borrow()
}
@ -229,10 +226,10 @@ impl ModuleInstance {
extern_vals: I,
) -> Result<ModuleRef, Error> {
let module = loaded_module.module();
let instance = ModuleRef(Rc::new(ModuleInstance::default()));
let instance = ModuleRef(::MyRc::new(ModuleInstance::default()));
for &Type::Function(ref ty) in module.type_section().map(|ts| ts.types()).unwrap_or(&[]) {
let signature = Rc::new(Signature::from_elements(ty));
let signature = ::MyRc::new(Signature::from_elements(ty));
instance.push_signature(signature);
}
@ -326,8 +323,11 @@ impl ModuleInstance {
locals: body.locals().to_vec(),
code: code,
};
let func_instance =
FuncInstance::alloc_internal(Rc::downgrade(&instance.0), signature, func_body);
let func_instance = FuncInstance::alloc_internal(
::MyRc::downgrade(&instance.0),
signature,
func_body,
);
instance.push_func(func_instance);
}
}

src/not_threadsafe.rs (new file, 6 lines)
View File

@ -0,0 +1,6 @@
pub use alloc::rc::Rc as MyRc;
pub use alloc::rc::Weak as MyWeak;
pub use core::cell::Cell as MyCell;
pub use core::cell::Ref as MyRefRead;
pub use core::cell::RefMut as MyRefWrite;
pub use core::cell::RefCell as MyRefCell;

View File

@ -1,7 +1,5 @@
#[allow(unused_imports)]
use alloc::prelude::v1::*;
use alloc::rc::Rc;
use core::cell::RefCell;
use core::fmt;
use core::u32;
use func::FuncRef;
@ -16,7 +14,7 @@ use Error;
/// [`TableInstance`]: struct.TableInstance.html
///
#[derive(Clone, Debug)]
pub struct TableRef(Rc<TableInstance>);
pub struct TableRef(::MyRc<TableInstance>);
impl ::core::ops::Deref for TableRef {
type Target = TableInstance;
@ -42,7 +40,7 @@ pub struct TableInstance {
/// Table limits.
limits: ResizableLimits,
/// Table memory buffer.
buffer: RefCell<Vec<Option<FuncRef>>>,
buffer: ::MyRefCell<Vec<Option<FuncRef>>>,
}
impl fmt::Debug for TableInstance {
@ -67,13 +65,13 @@ impl TableInstance {
/// Returns `Err` if `initial_size` is greater than `maximum_size`.
pub fn alloc(initial_size: u32, maximum_size: Option<u32>) -> Result<TableRef, Error> {
let table = TableInstance::new(ResizableLimits::new(initial_size, maximum_size))?;
Ok(TableRef(Rc::new(table)))
Ok(TableRef(::MyRc::new(table)))
}
fn new(limits: ResizableLimits) -> Result<TableInstance, Error> {
check_limits(&limits)?;
Ok(TableInstance {
buffer: RefCell::new(vec![None; limits.initial() as usize]),
buffer: ::MyRefCell::new(vec![None; limits.initial() as usize]),
limits: limits,
})
}

src/threadsafe.rs (new file, 59 lines)
View File

@ -0,0 +1,59 @@
extern crate atomic;
use alloc::sync::{Arc, RwLock};
pub use self::atomic::{Atomic, Ordering::Relaxed as Ordering};
pub use alloc::sync::{
Arc as MyRc, RwLockReadGuard as MyRefRead, RwLockWriteGuard as MyRefWrite, Weak as MyWeak,
};
/// Thread-safe wrapper which can be used in place of a `RefCell`.
#[derive(Debug)]
pub struct MyRefCell<T>(Arc<RwLock<T>>);
impl<T> MyRefCell<T> {
/// Create new wrapper object.
pub fn new(obj: T) -> MyRefCell<T> {
MyRefCell(Arc::new(RwLock::new(obj)))
}
/// Borrow a read guard (`MyRefRead`) to the inner value.
pub fn borrow(&self) -> ::MyRefRead<T> {
self.0
.read()
.expect("failed to acquire lock while trying to borrow")
}
/// Borrow a write guard (`MyRefWrite`) to the inner value.
pub fn borrow_mut(&self) -> ::MyRefWrite<T> {
self.0
.write()
.expect("failed to acquire lock while trying to borrow mutably")
}
}
/// Thread-safe wrapper which can be used in place of a `Cell`.
#[derive(Debug)]
pub struct MyCell<T>(Atomic<T>)
where
T: Copy;
impl<T> MyCell<T>
where
T: Copy,
{
/// Create new wrapper object.
pub fn new(obj: T) -> MyCell<T> {
MyCell(Atomic::new(obj))
}
/// Returns the inner value.
pub fn get(&self) -> T {
self.0.load(::Ordering)
}
/// Sets the inner value.
pub fn set(&self, val: T) {
self.0.store(val, ::Ordering);
}
}
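
For orientation, a standalone usage sketch of the `MyRefCell` wrapper defined above, written against `std::sync` directly instead of the crate's `alloc` re-export; the closing comment reflects why the nested-access test is excluded in threadsafe mode:

```rust
use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard};

/// Stand-in for the wrapper above, kept self-contained for the example.
#[derive(Debug)]
pub struct MyRefCell<T>(Arc<RwLock<T>>);

impl<T> MyRefCell<T> {
    pub fn new(obj: T) -> MyRefCell<T> {
        MyRefCell(Arc::new(RwLock::new(obj)))
    }

    pub fn borrow(&self) -> RwLockReadGuard<T> {
        self.0.read().expect("failed to acquire lock while trying to borrow")
    }

    pub fn borrow_mut(&self) -> RwLockWriteGuard<T> {
        self.0.write().expect("failed to acquire lock while trying to borrow mutably")
    }
}

fn main() {
    let buffer = MyRefCell::new(vec![0u8; 4]);
    buffer.borrow_mut()[0] = 42; // write guard is dropped at the end of the statement
    assert_eq!(buffer.borrow()[0], 42); // shared read guard

    // Holding a read guard while calling borrow_mut() on the same wrapper from
    // the same thread may deadlock, unlike RefCell, which panics; hence the
    // nested-access test is cfg-gated out in threadsafe mode.
}
```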

View File

@ -16,4 +16,6 @@ cd $(dirname $0)
time cargo test --all ${EXTRA_ARGS}
time cargo test --all --features threadsafe
cd -