Improve memory locality of checking for duplicate exports

Using a sorted slice gives us the same O(N log N) worst-case execution
time as the previously used BTreeSet, but needs only a single
allocation, just as a HashSet would, so we should see better memory
locality and hence better constant factors when checking for duplicate
exports.
Adam Reichold 2019-06-11 19:23:32 +02:00
parent 833c950fe6
commit 1cea37cbff
1 changed file with 16 additions and 10 deletions
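
For illustration only, here is a minimal, self-contained sketch of the
sorted-slice duplicate check described above. The helper name
first_duplicate, the main function, and the example export names are
made up and do not appear in this commit:

// Returns the first duplicated name, if any, by sorting a copy of the
// names and comparing adjacent elements.
fn first_duplicate<'a>(names: &[&'a str]) -> Option<&'a str> {
	// A single contiguous allocation holds all the names...
	let mut sorted = names.to_vec();
	// ...sorting is the O(N log N) part...
	sorted.sort_unstable();
	// ...and any duplicate must now sit next to its twin.
	sorted
		.windows(2)
		.find(|pair| pair[0] == pair[1])
		.map(|pair| pair[0])
}

fn main() {
	assert_eq!(first_duplicate(&["memory", "add", "sub", "add"]), Some("add"));
	assert_eq!(first_duplicate(&["memory", "add", "sub"]), None);
}

The diff below applies the same idea in place inside validate_module:
collect the export names into a Vec, sort it, and compare each name
with its successor.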


@@ -27,12 +27,10 @@ use core::fmt;
 #[cfg(feature = "std")]
 use std::error;
-use alloc::collections::BTreeSet;
 use self::context::ModuleContextBuilder;
 use parity_wasm::elements::{
-	BlockType, External, FuncBody, GlobalEntry, GlobalType, InitExpr, Instruction, Internal,
-	MemoryType, Module, ResizableLimits, TableType, Type, ValueType,
+	BlockType, ExportEntry, External, FuncBody, GlobalEntry, GlobalType, InitExpr, Instruction,
+	Internal, MemoryType, Module, ResizableLimits, TableType, Type, ValueType,
 };
 pub mod context;
@@ -247,13 +245,21 @@ pub fn validate_module<V: Validator>(module: &Module) -> Result<V::Output, Error
 	// validate export section
 	if let Some(export_section) = module.export_section() {
-		let mut export_names = BTreeSet::new();
-		for export in export_section.entries() {
-			// BTreeSet::insert returns false if item already in set.
-			let duplicate = export_names.insert(export.field()) == false;
-			if duplicate {
-				return Err(Error(format!("duplicate export {}", export.field())));
+		let mut export_names = export_section
+			.entries()
+			.iter()
+			.map(ExportEntry::field)
+			.collect::<Vec<_>>();
+		export_names.sort_unstable();
+		for (fst, snd) in export_names.iter().zip(export_names.iter().skip(1)) {
+			if fst == snd {
+				return Err(Error(format!("duplicate export {}", fst)));
 			}
+		}
+		for export in export_section.entries() {
 			match *export.internal() {
 				Internal::Function(function_index) => {
 					context.require_function(function_index)?;