Skip to content

Commit

Permalink
Use try_from to replace the cast crate in wasmtime-runtime.
Browse files — browse the repository at this point in the history
  • Loading branch information
arilotter authored and kubkon committed Jun 9, 2019
1 parent f3f6ab0 commit 8dc1d90
Show file tree
Hide file tree
Showing 4 changed files with 31 additions and 33 deletions.
1 change: 0 additions & 1 deletion wasmtime-runtime/Cargo.toml
Expand Up @@ -21,7 +21,6 @@ lazy_static = "1.2.0"
libc = { version = "0.2.48", default-features = false }
errno = "0.2.4"
memoffset = "0.3.0"
cast = { version = "0.2.2", default-features = false }
failure = { version = "0.1.3", default-features = false }
failure_derive = { version = "0.1.3", default-features = false }
indexmap = "1.0.2"
Expand Down
49 changes: 25 additions & 24 deletions wasmtime-runtime/src/instance.rs
Expand Up @@ -30,6 +30,7 @@ use indexmap;
use std::borrow::ToOwned;
use std::boxed::Box;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::rc::Rc;
use std::string::{String, ToString};
use wasmtime_environ::{DataInitializer, Module, TableElements, VMOffsets};
Expand All @@ -42,7 +43,7 @@ fn signature_id(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmshared_signature_id(index)));
.add(usize::try_from(offsets.vmctx_vmshared_signature_id(index)).unwrap());
*(ptr as *const VMSharedSignatureIndex)
}
}
Expand All @@ -55,7 +56,7 @@ fn imported_function<'vmctx>(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmfunction_import(index)));
.add(usize::try_from(offsets.vmctx_vmfunction_import(index)).unwrap());
&*(ptr as *const VMFunctionImport)
}
}
Expand All @@ -68,7 +69,7 @@ fn imported_table<'vmctx>(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmtable_import(index)));
.add(usize::try_from(offsets.vmctx_vmtable_import(index)).unwrap());
&*(ptr as *const VMTableImport)
}
}
Expand All @@ -81,7 +82,7 @@ fn imported_memory<'vmctx>(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmmemory_import(index)));
.add(usize::try_from(offsets.vmctx_vmmemory_import(index)).unwrap());
&*(ptr as *const VMMemoryImport)
}
}
Expand All @@ -93,7 +94,7 @@ fn imported_global<'vmctx>(
) -> &'vmctx VMGlobalImport {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmglobal_import(index)));
.add(usize::try_from(offsets.vmctx_vmglobal_import(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMGlobalImport)
}
Expand All @@ -106,7 +107,7 @@ fn table<'vmctx>(
) -> &'vmctx VMTableDefinition {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmtable_definition(index)));
.add(usize::try_from(offsets.vmctx_vmtable_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMTableDefinition)
}
Expand All @@ -119,7 +120,7 @@ fn table_mut<'vmctx>(
) -> &'vmctx mut VMTableDefinition {
unsafe {
let ptr = (vmctx as *mut VMContext as *mut u8)
.add(cast::usize(offsets.vmctx_vmtable_definition(index)));
.add(usize::try_from(offsets.vmctx_vmtable_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&mut *(ptr as *mut VMTableDefinition)
}
Expand All @@ -132,7 +133,7 @@ fn memory<'vmctx>(
) -> &'vmctx VMMemoryDefinition {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmmemory_definition(index)));
.add(usize::try_from(offsets.vmctx_vmmemory_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMMemoryDefinition)
}
Expand All @@ -145,7 +146,7 @@ fn memory_mut<'vmctx>(
) -> &'vmctx mut VMMemoryDefinition {
unsafe {
let ptr = (vmctx as *mut VMContext as *mut u8)
.add(cast::usize(offsets.vmctx_vmmemory_definition(index)));
.add(usize::try_from(offsets.vmctx_vmmemory_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&mut *(ptr as *mut VMMemoryDefinition)
}
Expand All @@ -158,7 +159,7 @@ fn global<'vmctx>(
) -> &'vmctx VMGlobalDefinition {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmglobal_definition(index)));
.add(usize::try_from(offsets.vmctx_vmglobal_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMGlobalDefinition)
}
Expand All @@ -171,7 +172,7 @@ fn global_mut<'vmctx>(
) -> &'vmctx mut VMGlobalDefinition {
unsafe {
let ptr = (vmctx as *mut VMContext as *mut u8)
.add(cast::usize(offsets.vmctx_vmglobal_definition(index)));
.add(usize::try_from(offsets.vmctx_vmglobal_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&mut *(ptr as *mut VMGlobalDefinition)
}
Expand Down Expand Up @@ -237,7 +238,7 @@ impl Instance {
fn signature_ids_ptr(&mut self) -> *mut VMSharedSignatureIndex {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_signature_ids_begin()))
.add(usize::try_from(self.offsets.vmctx_signature_ids_begin()).unwrap())
as *mut VMSharedSignatureIndex
}
}
Expand All @@ -251,7 +252,7 @@ impl Instance {
fn imported_functions_ptr(&mut self) -> *mut VMFunctionImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_functions_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_functions_begin()).unwrap())
as *mut VMFunctionImport
}
}
Expand All @@ -266,7 +267,7 @@ impl Instance {
fn imported_tables_ptr(&mut self) -> *mut VMTableImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_tables_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_tables_begin()).unwrap())
as *mut VMTableImport
}
}
Expand All @@ -280,7 +281,7 @@ impl Instance {
fn imported_memories_ptr(&mut self) -> *mut VMMemoryImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_memories_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_memories_begin()).unwrap())
as *mut VMMemoryImport
}
}
Expand All @@ -294,7 +295,7 @@ impl Instance {
fn imported_globals_ptr(&mut self) -> *mut VMGlobalImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_globals_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_globals_begin()).unwrap())
as *mut VMGlobalImport
}
}
Expand All @@ -315,7 +316,7 @@ impl Instance {
fn tables_ptr(&mut self) -> *mut VMTableDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_tables_begin()))
.add(usize::try_from(self.offsets.vmctx_tables_begin()).unwrap())
as *mut VMTableDefinition
}
}
Expand All @@ -334,7 +335,7 @@ impl Instance {
fn memories_ptr(&mut self) -> *mut VMMemoryDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_memories_begin()))
.add(usize::try_from(self.offsets.vmctx_memories_begin()).unwrap())
as *mut VMMemoryDefinition
}
}
Expand All @@ -354,7 +355,7 @@ impl Instance {
fn globals_ptr(&mut self) -> *mut VMGlobalDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_globals_begin()))
.add(usize::try_from(self.offsets.vmctx_globals_begin()).unwrap())
as *mut VMGlobalDefinition
}
}
Expand Down Expand Up @@ -508,7 +509,7 @@ impl Instance {
let offsets = &self.offsets;
let begin = unsafe {
(&self.vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_tables_begin()))
.add(usize::try_from(offsets.vmctx_tables_begin()).unwrap())
} as *const VMTableDefinition;
let end: *const VMTableDefinition = table;
// TODO: Use `offset_from` once it stablizes.
Expand All @@ -524,7 +525,7 @@ impl Instance {
let offsets = &self.offsets;
let begin = unsafe {
(&self.vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_memories_begin()))
.add(usize::try_from(offsets.vmctx_memories_begin()).unwrap())
} as *const VMMemoryDefinition;
let end: *const VMMemoryDefinition = memory;
// TODO: Use `offset_from` once it stablizes.
Expand Down Expand Up @@ -653,7 +654,7 @@ impl InstanceHandle {

let mut instance_mmap = Mmap::with_at_least(
mem::size_of::<Instance>()
.checked_add(cast::usize(offsets.size_of_vmctx()))
.checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
.unwrap(),
)
.map_err(InstantiationError::Resource)?;
Expand Down Expand Up @@ -990,7 +991,7 @@ fn get_memory_init_start(init: &DataInitializer<'_>, instance: &mut Instance) ->
} else {
instance.imported_global(base).from
};
start += cast::usize(*unsafe { (*global).as_u32() });
start += usize::try_from(*unsafe { (*global).as_u32() }).unwrap();
}

start
Expand Down Expand Up @@ -1055,7 +1056,7 @@ fn get_table_init_start(init: &TableElements, instance: &mut Instance) -> usize
} else {
instance.imported_global(base).from
};
start += cast::usize(*unsafe { (*global).as_u32() });
start += usize::try_from(*unsafe { (*global).as_u32() }).unwrap();
}

start
Expand Down
7 changes: 4 additions & 3 deletions wasmtime-runtime/src/memory.rs
Expand Up @@ -4,6 +4,7 @@

use crate::mmap::Mmap;
use crate::vmcontext::VMMemoryDefinition;
use std::convert::TryFrom;
use std::string::String;
use wasmtime_environ::{MemoryPlan, MemoryStyle, WASM_MAX_PAGES, WASM_PAGE_SIZE};

Expand Down Expand Up @@ -101,9 +102,9 @@ impl LinearMemory {
return None;
}

let delta_bytes = cast::usize(delta) * WASM_PAGE_SIZE as usize;
let prev_bytes = cast::usize(prev_pages) * WASM_PAGE_SIZE as usize;
let new_bytes = cast::usize(new_pages) * WASM_PAGE_SIZE as usize;
let delta_bytes = usize::try_from(delta).unwrap() * WASM_PAGE_SIZE as usize;
let prev_bytes = usize::try_from(prev_pages).unwrap() * WASM_PAGE_SIZE as usize;
let new_bytes = usize::try_from(new_pages).unwrap() * WASM_PAGE_SIZE as usize;

if new_bytes > self.mmap.len() - self.offset_guard_size {
// If the new size is within the declared maximum, but needs more memory than we
Expand Down
7 changes: 2 additions & 5 deletions wasmtime-runtime/src/sig_registry.rs
Expand Up @@ -2,9 +2,9 @@
//! signature checking.

use crate::vmcontext::VMSharedSignatureIndex;
use cast;
use cranelift_codegen::ir;
use std::collections::{hash_map, HashMap};
use std::convert::TryFrom;

/// WebAssembly requires that the caller and callee signatures in an indirect
/// call must match. To implement this efficiently, keep a registry of all
Expand All @@ -29,10 +29,7 @@ impl SignatureRegistry {
match self.signature_hash.entry(sig.clone()) {
hash_map::Entry::Occupied(entry) => *entry.get(),
hash_map::Entry::Vacant(entry) => {
#[cfg(target_pointer_width = "32")]
let sig_id = VMSharedSignatureIndex::new(cast::u32(len));
#[cfg(target_pointer_width = "64")]
let sig_id = VMSharedSignatureIndex::new(cast::u32(len).unwrap());
let sig_id = VMSharedSignatureIndex::new(u32::try_from(len).unwrap());
entry.insert(sig_id);
sig_id
}
Expand Down

0 comments on commit 8dc1d90

Please sign in to comment.